diff --git a/.dockerignore b/.dockerignore index f43228a4646..0ca00ae3b26 100644 --- a/.dockerignore +++ b/.dockerignore @@ -4,3 +4,4 @@ ./*.md ./bin ./ui/node_modules +./agent/tmp diff --git a/.env b/.env index 65564b822aa..32a9da2880a 100644 --- a/.env +++ b/.env @@ -1,110 +1,193 @@ # PLEASE DO NOT EDIT THIS FILE; modify '.env.override' file instead # This is in order to avoid conflict with upstream code when updating to a newer version -# Automatically increased once a new release is out -SHELLHUB_VERSION=v0.14.1 +# ShellHub version. +SHELLHUB_VERSION=v0.22.0-rc.1 -# The HTTP listen port for the ShellHub web-based GUI, API and Reverse SSH tunnel. -# Values: any free port on host +# The default log level for ShellHub. +# VALUES: https://pkg.go.dev/github.com/sirupsen/logrus#Level +SHELLHUB_LOG_LEVEL=info + +# The log format for ShellHub. +# VALUES: json, text. +SHELLHUB_LOG_FORMAT=text + +# The network interface to which the gateway container will bind. +# VALUES: Any valid IP address +SHELLHUB_BIND_ADDRESS=0.0.0.0 + +# The HTTP port for the ShellHub web-based UI, API, and Reverse SSH tunnel. +# VALUES: Any available port on the host SHELLHUB_HTTP_PORT=80 -# The HTTPS listen port for the ShellHub web-based GUI, API and Reverse SSH tunnel. -# Values: any free port on host +# The HTTPS port for the ShellHub web-based UI, API, and Reverse SSH tunnel. +# VALUES: Any available port on the host SHELLHUB_HTTPS_PORT=443 -# The SSH listen port for incoming SSH connections to devices -# Values: any free port on host +# The SSH port for incoming SSH connections to devices. +# VALUES: Any available port on the host SHELLHUB_SSH_PORT=22 -# Set this variable to true if you are running a Layer 4 load balancer with proxy protocol in front of ShellHub +# Set to true if using a Layer 4 load balancer with proxy protocol in front of ShellHub. SHELLHUB_PROXY=false -# Automatic HTTPS with Let's Encrypt +# Enable automatic HTTPS with Let's Encrypt. 
SHELLHUB_AUTO_SSL=false -# Domain of the server -# NOTICE: Only required if automatic HTTPS is enabled -# Values: a valid domain name +# The domain of the server. +# NOTICE: Required only if automatic HTTPS is enabled. +# VALUES: A valid domain name SHELLHUB_DOMAIN=localhost -# Public URL domain -# It is used to generate the public URL for accessing devices via HTTP -SHELLHUB_PUBLIC_URL_DOMAIN= +# The network name used by Docker Compose to avoid port collisions when running multiple instances. +# VALUES: A valid network name +SHELLHUB_NETWORK=shellhub_network + +# Enable web endpoints feature. +SHELLHUB_WEB_ENDPOINTS=false + +# The domain used to create the web endpoints. +# NOTICE: If empty, the [SHELLHUB_DOMAIN] will be used. +SHELLHUB_WEB_ENDPOINTS_DOMAIN= -# Enable geoip (geolocation) -# NOTICE: When true, SHELLHUB_MAXMIND_LICENSE is required -SHELLHUB_GEOIP=false +# The DNS provider used to generate wildcard SSL certificate. +# NOTICE: If empty, the gateway will be assumed as "digitalocean". +SHELLHUB_WEB_ENDPOINTS_DNS_PROVIDER=digitalocean -# GeoLite2 Maxmind license +# The token used to generate wildcard SSL certificate using DNS method for web endpoints' URL. +# Currently, only DigitalOcean and Cloudflare are supported. +# NOTICE: Required if SHELLHUB_AUTO_SSL is defined. +SHELLHUB_WEB_ENDPOINTS_DNS_PROVIDER_TOKEN= + +# Specifies an alternative mirror URL for downloading the GeoIP databases. This +# field takes precedence over SHELLHUB_MAXMIND_LICENSE; when both are +# configured, SHELLHUB_MAXMIND_MIRROR will be used as the primary source for +# database downloads. Leave both blank to disable the feature. +SHELLHUB_MAXMIND_MIRROR= + +# Specifies a MaxMind license key used to authenticate requests for downloading +# the GeoIP database directly from MaxMind. If SHELLHUB_MAXMIND_MIRROR is not +# set, this license key will be used as the fallback method for fetching the +# database. Leave both blank to disable the feature. 
SHELLHUB_MAXMIND_LICENSE= -# Set worker's schedule -# NOTICE: The format is the same as the Go implementation of https://pkg.go.dev/github.com/robfig/cron +# The schedule for worker tasks. +# NOTICE: Format follows Go's cron package (https://pkg.go.dev/github.com/robfig/cron). SHELLHUB_WORKER_SCHEDULE=@daily -# Enable billing feature -SHELLHUB_BILLING=false +# Internal setting for our cloud service. +SHELLHUB_CLOUD=false -# Recording session host +# The URL for the session recording host. SHELLHUB_RECORD_URL=api:8080 -# Records retention time in days -SHELLHUB_RECORD_RETENTION=0 - -# Session record cleanup worker schedule -SHELLHUB_SESSION_RECORD_CLEANUP_SCHEDULE=@daily - -# Enable ShellHub Enterprise features -# NOTE: You need a valid ShellHub Enterprise license file +# Enable ShellHub Enterprise features. +# NOTICE: Requires a valid ShellHub Enterprise license. SHELLHUB_ENTERPRISE=false -# ShellHub Enterprise Admin Console username +# The username for the ShellHub Enterprise Admin Console. SHELLHUB_ENTERPRISE_ADMIN_USERNAME= -# ShellHub Enterprise Admin Console password +# The password for the ShellHub Enterprise Admin Console. SHELLHUB_ENTERPRISE_ADMIN_PASSWORD= -# Internal to our cloud service. - don't worry about it -SHELLHUB_CLOUD=false - -# Set Go modules proxy cache URL (development only) -#SHELLHUB_GOPROXY=http://localhost:3333 +# The URL for the Go modules proxy cache (development only). +# SHELLHUB_GOPROXY=http://localhost:3333 -# Set NPM proxy cache URL (development only) -#SHELLHUB_NPM_REGISTRY=http://localhost:4873 +# The URL for the NPM proxy cache (development only). +# SHELLHUB_NPM_REGISTRY=http://localhost:4873 -# Billing configs +# Billing configuration +SHELLHUB_BILLING=stripe STRIPE_PUBLISHABLE_KEY= -SHELLHUB_BILLING_URL= -# Default log level for ShellHub -# Available levels: https://pkg.go.dev/github.com/sirupsen/logrus#Level -SHELLHUB_LOG_LEVEL="info" - -# Sentry DSN +# The Sentry DSN for error tracking. 
SHELLHUB_SENTRY_DSN= -# Receive announcements about releases, new features, critical security issues -# and relevant news from ShellHub Community Team. +# Enable announcements about releases, new features, critical security issues, and relevant news from ShellHub Community Team. SHELLHUB_ANNOUNCEMENTS=false -# Asynq configs +# Show features from Cloud/Enterprise versions in the ShellHub community edition. +SHELLHUB_PAYWALL=true + +# Asynq configuration # The maximum duration to wait before processing a group of tasks. SHELLHUB_ASYNQ_GROUP_MAX_DELAY=1 -# The grace period has configurable upper bound: you can set a maximum aggregation delay, after which Asynq server -# will aggregate the tasks regardless of the remaining grace period -SHELLHUB_ASNYQ_GROUP_GRACE_PERIOD=1 -# The maximum number of tasks that can be aggregated together. If that number is reached, Asynq server will aggregate -# the tasks immediately. -SHELLHUB_ASYNQ_GROUP_MAX_SIZE=500 - -# Enables container remote access -SHELLHUB_CONNECTOR=false - -# This variable specifies the namespace to which containers will be added when the container remote access is enabled -SHELLHUB_CONNECTOR_TENANT_ID= - -# Allows SSH to connect with an agent via a public key when the agent version is less than 0.6.0. -# Agents 0.5.x or earlier do not validate the public key request and may panic. -# Please refer to: https://github.com/shellhub-io/shellhub/issues/3453 + +# The grace period's upper bound for Asynq task aggregation. Must be greater than "SHELLHUB_ASYNQ_GROUP_MAX_DELAY" +SHELLHUB_ASYNQ_GROUP_GRACE_PERIOD=2 + +# The maximum number of tasks that can be aggregated together by Asynq. +SHELLHUB_ASYNQ_GROUP_MAX_SIZE=1000 + +# Defines the maximum duration, in hours, for which a unique job remains locked in the queue. +# If the job does not complete within this timeout, the lock is released, allowing a new instance +# of the job to be enqueued and executed. +# +# A value lower than or equal to 0 disables the uniqueness. 
+SHELLHUB_ASYNQ_UNIQUENESS_TIMEOUT=24 + +# Allow SSH connections with an agent via a public key for versions below 0.6.0. +# Values: true, false SHELLHUB_ALLOW_PUBLIC_KEY_ACCESS_BELLOW_0_6_0=false + +# The pool size for Redis cache connections. +# NOTICE: Zero means the default value of the Redis driver. +# VALUES: A non-negative integer +SHELLHUB_REDIS_CACHE_POOL_SIZE=0 + +# The maximum duration (in minutes) for blocking a source from login attempts. +# NOTICE: Set to 0 to disable. +# VALUES: A non-negative integer +SHELLHUB_MAXIMUM_ACCOUNT_LOCKOUT=60 + +# Enable Connector features on UI. +SHELLHUB_CONNECTOR=true + +# A secret used to encrypt the X.509 private key when ShellHub acts as an SAML Service Provider (SP) +# with signed authentication requests. +# +# Only used in enterprise. +SHELLHUB_SAML_SECRET= + +# Defines a rate limit for incoming requests to the ShellHub API. +SHELLHUB_API_RATE_LIMIT="1000r/s" + +# Defines the size of the shared memory zone used for rate limiting. +SHELLHUB_API_RATE_LIMIT_ZONE_SIZE=10m + +# Defines the maximum burst size for incoming requests to the ShellHub API. +# VALUES: A positive integer +SHELLHUB_API_BURST_SIZE=100 + +# Defines the delay strategy for handling bursts of incoming requests. +# VALUES: nodelay, or the number of requests to delay. +SHELLHUB_API_BURST_DELAY=nodelay + +# Defines if the metrics endpoint is enabled. +SHELLHUB_METRICS=false + +# Defines if empty passwords are allowed for SSH connections on the agent. +SHELLHUB_PERMIT_EMPTY_PASSWORDS=false + +# Defines the number of retry attempts for the internal HTTP client when a request fails. +SHELLHUB_INTERNAL_HTTP_CLIENT_RETRY_COUNT=3 + +# Sets the initial wait time (in seconds) before retrying a failed request. +SHELLHUB_INTERNAL_HTTP_CLIENT_RETRY_WAIT_TIME=5 + +# Specifies the maximum wait time (in seconds) between retries. +SHELLHUB_INTERNAL_HTTP_CLIENT_RETRY_MAX_WAIT_TIME=20 + +# The base URL for the API service. 
+SHELLHUB_INTERNAL_HTTP_CLIENT_API_BASE_URL=http://api:8080 + +# The base URL for the Enterprise service. +SHELLHUB_INTERNAL_HTTP_CLIENT_ENTERPRISE_BASE_URL=http://cloud:8080 + +# Set false to disable access logs for gateway nginx +SHELLHUB_GATEWAY_ACCESS_LOGS=true + +# The URL for the onboarding survey form. +# NOTICE: Leave empty to disable the onboarding survey. +SHELLHUB_ONBOARDING_URL=https://forms.infra.ossystems.io/s/f3fo9q3lkda8rrss9xpjus99 diff --git a/.github/ISSUE_TEMPLATE/bug-report.yml b/.github/ISSUE_TEMPLATE/bug-report.yml index 0ee4e8380c1..992076fcea7 100644 --- a/.github/ISSUE_TEMPLATE/bug-report.yml +++ b/.github/ISSUE_TEMPLATE/bug-report.yml @@ -1,27 +1,14 @@ name: Bug report -description: Create a bug report +description: Report a bug or issue +labels: ["kind/bug", "triage"] body: - type: textarea id: description attributes: - label: Description - validations: - required: true - - type: dropdown - id: edition - attributes: - label: Edition - description: Which edition of ShellHub are you running? - options: - - Community - - Enterprise - - Cloud - validations: - required: true - - type: input - id: version - attributes: - label: Version - description: Which version of ShellHub are you running? + label: Describe the issue + placeholder: | + What happened? What did you expect? + + Include steps to reproduce, version, logs, or anything else that might help. 
validations: required: true diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 9baf440078a..abbfbeca037 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -1,8 +1,8 @@ blank_issues_enabled: false contact_links: - - name: Feature request - about: Propose any new features here on our ideas board + - name: Feature Request + about: Suggest a new feature or improvement url: https://github.com/shellhub-io/shellhub/discussions/categories/ideas - name: Question - about: Ask the community a question + about: Get help from the community url: https://github.com/shellhub-io/shellhub/discussions/categories/q-a diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 33e6ff2b13b..3f938a39758 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -29,6 +29,10 @@ updates: interval: weekly commit-message: prefix: "docker: ui" + ignore: + - dependency-name: "node" + update-types: ["version-update:semver-major"] + versions: [">=25"] - package-ecosystem: docker directory: "/agent" schedule: diff --git a/.github/workflows/build-agent.yml b/.github/workflows/build-agent.yml index 78e6f9626ed..a5a0ba3cf34 100644 --- a/.github/workflows/build-agent.yml +++ b/.github/workflows/build-agent.yml @@ -17,28 +17,11 @@ on: jobs: build: - runs-on: ubuntu-20.04 - - strategy: - matrix: - dockerfile: [amd64, arm32v6, arm32v7, arm64v8, i386] - include: - # includes a new variable of npm with a value of 2 - # for the matrix leg matching the os and version - - dockerfile: amd64 - arch: amd64 - - dockerfile: arm32v6 - arch: arm/6 - - dockerfile: arm32v7 - arch: arm/7 - - dockerfile: arm64v8 - arch: arm64/8 - - dockerfile: i386 - arch: i386 + runs-on: ubuntu-24.04 steps: - name: checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v6 - name: login to docker hub if: "github.event_name != 'pull_request'" @@ -53,150 +36,162 @@ jobs: - name: Set github reference env run: echo 
RELEASE_VERSION=${GITHUB_REF#refs/*/} >> $GITHUB_ENV - - name: Set docker build option env - if: "github.event_name != 'pull_request'" - run: echo "BUILD_OPTION=--provenance=false --push" >> $GITHUB_ENV - - - name: build image - if: "!contains(github.ref, 'refs/tags/v')" + - name: Set image tag run: | - docker buildx build ${{ env.BUILD_OPTION }} -f agent/Dockerfile.${{ matrix.dockerfile }} \ - --tag shellhubio/agent:${{ github.sha }}-${{ matrix.dockerfile }} \ - --platform linux/${{ matrix.arch }} . - - - name: build image from github tag - if: "contains(github.ref, 'refs/tags/v')" + if [[ "${{ github.ref }}" == refs/tags/v* ]]; then + echo "IMAGE_TAG=${{ env.RELEASE_VERSION }},latest" >> $GITHUB_ENV + echo "BUILD_ARGS=--build-arg SHELLHUB_VERSION=${{ env.RELEASE_VERSION }}" >> $GITHUB_ENV + else + echo "IMAGE_TAG=${{ github.sha }}" >> $GITHUB_ENV + echo "BUILD_ARGS=" >> $GITHUB_ENV + fi + + - name: Build and push multiarch image run: | - docker buildx build ${{ env.BUILD_OPTION }} -f agent/Dockerfile.${{ matrix.dockerfile }} \ - --tag shellhubio/agent:${{ env.RELEASE_VERSION }}-${{ matrix.dockerfile }} \ - --build-arg SHELLHUB_VERSION=${{ env.RELEASE_VERSION }} \ - --platform linux/${{ matrix.arch }} . - - - name: export rootfs + TAGS="" + IFS=',' read -ra TAG_ARRAY <<< "${{ env.IMAGE_TAG }}" + for tag in "${TAG_ARRAY[@]}"; do + TAGS="$TAGS --tag shellhubio/agent:$tag" + done + + docker buildx build \ + --platform linux/amd64,linux/arm64/v8,linux/arm/v7,linux/arm/v6,linux/386 \ + --file agent/Dockerfile \ + $TAGS \ + ${{ env.BUILD_ARGS }} \ + ${{ github.event_name != 'pull_request' && '--push' || '' }} \ + --provenance=false \ + . 
+ + - name: Export architecture-specific rootfs if: "contains(github.ref, 'refs/tags/v')" run: | - docker export $(docker create shellhubio/agent:${{ env.RELEASE_VERSION }}-${{ matrix.dockerfile }}) | gzip > rootfs-${{ matrix.dockerfile }}.tar.gz - - - name: upload rootfs artifact + mkdir -p rootfs + for arch in amd64 arm64v8 armv7 armv6 i386; do + case $arch in + amd64) platform="linux/amd64" ;; + arm64v8) platform="linux/arm64/v8" ;; + armv7) platform="linux/arm/v7" ;; + armv6) platform="linux/arm/v6" ;; + i386) platform="linux/386" ;; + esac + + # Build single architecture image for export + docker buildx build \ + --platform $platform \ + --file agent/Dockerfile \ + --tag shellhubio/agent:${{ env.RELEASE_VERSION }}-$arch \ + ${{ env.BUILD_ARGS }} \ + --load \ + --provenance=false \ + . + + # Export rootfs + docker export $(docker create shellhubio/agent:${{ env.RELEASE_VERSION }}-$arch) | gzip > rootfs/rootfs-$arch.tar.gz + done + + - name: Upload rootfs artifacts if: "contains(github.ref, 'refs/tags/v')" - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v6 with: - name: rootfs-${{ matrix.dockerfile }} - path: rootfs-${{ matrix.dockerfile }}.tar.gz + name: rootfs-artifacts + path: rootfs/*.tar.gz - multiarch: - if: github.event_name != 'pull_request' - needs: build - runs-on: ubuntu-latest + build-binaries: + if: "contains(github.ref, 'refs/tags/v')" + needs: build + runs-on: ubuntu-24.04 + strategy: + matrix: + include: + - goos: linux + goarch: amd64 + - goos: linux + goarch: arm64 + - goos: linux + goarch: arm + goarm: 7 + - goos: linux + goarch: arm + goarm: 6 + - goos: linux + goarch: 386 steps: - name: checkout code - uses: actions/checkout@v4 - - - name: Set github reference env - run: echo RELEASE_VERSION=${GITHUB_REF#refs/*/} >> $GITHUB_ENV - - - name: login to docker hub - run: echo "${{ secrets.DOCKER_PASSWORD }}" | docker login -u "${{ secrets.DOCKER_USERNAME }}" --password-stdin - - - name: build multiarch image - if: 
"!contains(github.ref, 'refs/tags/v')" - env: - DOCKER_CLI_EXPERIMENTAL: enabled - run: | - echo "Creating the manifest" - docker manifest create -a shellhubio/agent:${{ github.sha }} shellhubio/agent:${{ github.sha }}-amd64 shellhubio/agent:${{ github.sha }}-arm32v6 shellhubio/agent:${{ github.sha }}-arm32v7 shellhubio/agent:${{ github.sha }}-arm64v8 - - echo "Running amd64 manifest annotation" - docker manifest annotate shellhubio/agent:${{ github.sha }} shellhubio/agent:${{ github.sha }}-amd64 --os linux --arch amd64 + uses: actions/checkout@v6 - archname=(arm32v6 arm32v7 arm64v8) - arch=(arm arm arm64) - variant=(v6 v7 v8) - - for ((i = 0; i < 3; i++)) ; do - echo "Running ${archname[i]} manifest annotation" - docker manifest annotate shellhubio/agent:${{ github.sha }} shellhubio/agent:${{ github.sha }}-${archname[i]} --os linux --arch ${arch[i]} --variant ${variant[i]} - done + - name: setup go + uses: actions/setup-go@v6 + with: + go-version: '1.24.9' - echo "Pushing multiarch manifest" - docker manifest push shellhubio/agent:${{ github.sha }} + - name: Set release version + run: echo RELEASE_VERSION=${GITHUB_REF#refs/*/} >> $GITHUB_ENV - - name: build multiarch image from github tag - if: contains(github.ref, 'refs/tags/v') + - name: build agent binary env: - DOCKER_CLI_EXPERIMENTAL: enabled + CGO_ENABLED: 0 + GOOS: ${{ matrix.goos }} + GOARCH: ${{ matrix.goarch }} + GOARM: ${{ matrix.goarm }} run: | - echo "Creating the manifest" - docker manifest create -a shellhubio/agent:${{ env.RELEASE_VERSION }} shellhubio/agent:${{ env.RELEASE_VERSION }}-amd64 shellhubio/agent:${{ env.RELEASE_VERSION }}-arm32v6 shellhubio/agent:${{ env.RELEASE_VERSION }}-arm32v7 shellhubio/agent:${{ env.RELEASE_VERSION }}-arm64v8 - - echo "Running amd64 manifest annotation" - docker manifest annotate shellhubio/agent:${{ env.RELEASE_VERSION }} shellhubio/agent:${{ env.RELEASE_VERSION }}-amd64 --os linux --arch amd64 - - archname=(arm32v6 arm32v7 arm64v8) - arch=(arm arm arm64) - 
variant=(v6 v7 v8) - - for ((i = 0; i < 3; i++)) ; do - echo "Running ${archname[i]} manifest annotation" - docker manifest annotate shellhubio/agent:${{ env.RELEASE_VERSION }} shellhubio/agent:${{ env.RELEASE_VERSION }}-${archname[i]} --os linux --arch ${arch[i]} --variant ${variant[i]} - done - - echo "Pushing multiarch manifest" - docker manifest push shellhubio/agent:${{ env.RELEASE_VERSION }} + cd agent + BINARY_NAME="shellhub-agent-${{ matrix.goos }}-${{ matrix.goarch }}" + if [ -n "${{ matrix.goarm }}" ]; then + BINARY_NAME="${BINARY_NAME}v${{ matrix.goarm }}" + fi + go build -ldflags "-s -w -X main.AgentVersion=${{ env.RELEASE_VERSION }}" -o "$BINARY_NAME" . + gzip "$BINARY_NAME" + + - name: upload binary artifact + uses: actions/upload-artifact@v6 + with: + name: agent-binary-${{ matrix.goos }}-${{ matrix.goarch }}${{ matrix.goarm && format('v{0}', matrix.goarm) || '' }} + path: agent/shellhub-agent-*.gz vendored-tarball: if: "contains(github.ref, 'refs/tags/v')" needs: build - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - name: checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v6 - name: export agent tarball run: | cd ./agent && go mod vendor && cd .. 
&& tar czf shellhub-agent.tar.gz agent - name: upload agent tarball artifact - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v6 with: name: shellhub-agent path: shellhub-agent.tar.gz draft: if: "contains(github.ref, 'refs/tags/v')" - needs: vendored-tarball + needs: [build-binaries, vendored-tarball] runs-on: ubuntu-latest steps: - - name: download amd64 - uses: actions/download-artifact@v4 - with: - name: rootfs-amd64 - - name: download arm32v6 - uses: actions/download-artifact@v4 - with: - name: rootfs-arm32v6 - - name: download arm32v7 - uses: actions/download-artifact@v4 - with: - name: rootfs-arm32v7 - - name: download arm64v8 - uses: actions/download-artifact@v4 + - name: download rootfs artifacts + uses: actions/download-artifact@v7 with: - name: rootfs-arm64v8 - - name: download i386 - uses: actions/download-artifact@v4 + name: rootfs-artifacts + - name: download agent binaries + uses: actions/download-artifact@v7 with: - name: rootfs-i386 + pattern: agent-binary-* + merge-multiple: true - name: download vendored tarball - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v7 with: name: shellhub-agent - name: release draft - uses: softprops/action-gh-release@v1 + uses: softprops/action-gh-release@v2 with: draft: true generate_release_notes: true files: | rootfs-*.tar.gz + shellhub-agent-*.gz shellhub-agent.tar.gz diff --git a/.github/workflows/commit.yml b/.github/workflows/commit.yml index 2a068c533ca..e73a0ff0e3e 100644 --- a/.github/workflows/commit.yml +++ b/.github/workflows/commit.yml @@ -11,7 +11,7 @@ jobs: check-commit-message: if: ${{ github.event.pull_request.user.login != 'dependabot[bot]' && github.event.pull_request.draft == false }} name: Check Commit Message - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - name: Check Commit Type uses: gsactions/commit-message-checker@v2 diff --git a/.github/workflows/dependabot_pr.yml b/.github/workflows/dependabot_pr.yml index 22208ff0b21..865d2a7cc06 100644 --- 
a/.github/workflows/dependabot_pr.yml +++ b/.github/workflows/dependabot_pr.yml @@ -11,16 +11,16 @@ jobs: go-mod-tidy: name: Update Go modules if: contains(github.head_ref, 'dependabot/go_modules/') && github.event.pull_request.user.login == 'dependabot[bot]' - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 with: ref: ${{ github.head_ref }} fetch-depth: 2 token: ${{ secrets.PAT }} - - uses: actions/setup-go@v5 + - uses: actions/setup-go@v6 with: - go-version: "1.20" + go-version: "1.24.9" - name: Run prepare-release script to update Go modules run: | ./devscripts/prepare-release @@ -29,7 +29,7 @@ jobs: run: | echo "msg=$(git log -1 --pretty=%s)" >> $GITHUB_OUTPUT - name: Commit changes - uses: stefanzweifel/git-auto-commit-action@v5 + uses: stefanzweifel/git-auto-commit-action@v7 with: branch: ${{ github.event.workflow_run.head_branch }} file_pattern: '**/go.mod **/go.sum' diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml index b3e6ffd432b..0921b0237a8 100644 --- a/.github/workflows/docker-build.yml +++ b/.github/workflows/docker-build.yml @@ -18,16 +18,23 @@ jobs: strategy: fail-fast: false matrix: - project: [api, ssh, gateway, ui, cli, connector] + project: [api, ssh, gateway, ui, cli] - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - name: Checkout source code - uses: actions/checkout@v4 + uses: actions/checkout@v6 + + - name: Login to Docker Registry + uses: docker/login-action@v3 + with: + registry: registry.infra.ossystems.io + username: ${{ secrets.PORTUS_USERNAME }} + password: ${{ secrets.PORTUS_TOKEN }} - name: Build '${{ matrix.project }}' Docker container - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: tags: shellhubio/${{ matrix.project }}:latest push: false diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml index 58ac170fd23..03f141d21d5 100644 --- 
a/.github/workflows/docker-publish.yml +++ b/.github/workflows/docker-publish.yml @@ -12,13 +12,13 @@ jobs: strategy: fail-fast: true matrix: - project: [api, ssh, gateway, ui, cli, connector] + project: [api, ssh, gateway, ui, cli] - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - name: Checkout source code - uses: actions/checkout@v4 + uses: actions/checkout@v6 - name: Get release version id: get_version diff --git a/.github/workflows/openapi-cd.yml b/.github/workflows/openapi-cd.yml new file mode 100644 index 00000000000..15373633983 --- /dev/null +++ b/.github/workflows/openapi-cd.yml @@ -0,0 +1,61 @@ +name: OpenAPI Continuous Delivery + +on: + release: + types: [published] + + workflow_dispatch: + +jobs: + openapi-bundle: + name: Bundle OpenAPI Specifications + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v6 + + - name: Setup Node.js environment + uses: actions/setup-node@v6.2.0 + with: + node-version: "18.1" + + - name: Setup bundler + run: | + npm install -g @redocly/cli@1.0.0-beta.100 + mkdir bundled/ + + - name: Bundle + run: redocly bundle openapi/spec/openapi.yaml -o bundled/openapi.yaml + + - name: Upload artifacts + uses: actions/upload-artifact@v6 + with: + name: bundled + path: bundled/ + + openapi-delivery: + name: Deliver OpenAPI Specifications + runs-on: ubuntu-latest + needs: + - openapi-bundle + + steps: + - uses: actions/checkout@v6 + + - name: Setup uploader + run: mkdir bundled/ + + - name: Download artifacts + uses: actions/download-artifact@v7 + with: + name: bundled + path: bundled/ + + - name: Deliver OpenAPI to Space + uses: BetaHuhn/do-spaces-action@v2 + with: + access_key: ${{ secrets.SPACE_ACCESS_KEY }} + secret_key: ${{ secrets.SPACE_SECRET_KEY }} + space_name: ${{ secrets.SPACE_NAME }} + space_region: ${{ secrets.SPACE_REGION }} + source: bundled/ diff --git a/.github/workflows/openapi-ci.yml b/.github/workflows/openapi-ci.yml new file mode 100644 index 00000000000..0edd9d36637 --- /dev/null +++ 
b/.github/workflows/openapi-ci.yml @@ -0,0 +1,37 @@ +name: OpenAPI Continuous Integration + +on: + push: + branches: [master] + paths: + - 'openapi/**' + pull_request: + branches: [master] + paths: + - 'openapi/**' + + workflow_dispatch: + +jobs: + openapi-ci: + name: OpenAPI CI + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v6 + + - name: Setup Node.js environment + uses: actions/setup-node@v6.2.0 + with: + node-version: "18.1" + - name: Setup Prettier + run: npm install -g prettier@2.8.7 + + - name: Run Prettier lint on project + run: prettier -c openapi/spec/**/*.yaml + + - name: Setup Redocly + run: npm install -g @redocly/cli@1.0.0-beta.100 + + - name: Run Redocly lint + run: redocly lint openapi/spec/openapi.yaml diff --git a/.github/workflows/qa.yml b/.github/workflows/qa.yml index 1e4484091ca..c8c02b11fbb 100644 --- a/.github/workflows/qa.yml +++ b/.github/workflows/qa.yml @@ -16,7 +16,7 @@ jobs: strategy: fail-fast: false matrix: - project: [api, agent, pkg, ssh, ui, cli, connector] + project: [api, agent, pkg, ssh, ui, cli] include: - project: api extra_args: "" @@ -36,7 +36,7 @@ jobs: - project: cli extra_args: "" lint_args: "" - - project: connector + - project: tests extra_args: "" lint_args: "" @@ -44,9 +44,16 @@ jobs: steps: - name: Check out code - uses: actions/checkout@v4 + uses: actions/checkout@v6 - - uses: dorny/paths-filter@v2 + - name: Login to Docker Registry + uses: docker/login-action@v3 + with: + registry: registry.infra.ossystems.io + username: ${{ secrets.PORTUS_USERNAME }} + password: ${{ secrets.PORTUS_TOKEN }} + + - uses: dorny/paths-filter@v3 id: filter with: # inline YAML or path to separate file (e.g.: .github/filters.yaml) @@ -59,22 +66,15 @@ jobs: - 'pkg/**' - 'ssh/**' - 'cli/**' - - 'connector/**' + - 'tests/**' - name: Set up Go 1.x [Go] if: matrix.project != 'ui' && steps.filter.outputs.go == 'true' && github.event.pull_request.draft == false - uses: actions/setup-go@v5 + uses: actions/setup-go@v6 with: - 
go-version: "1.20" + go-version: "1.24.9" id: go - - name: Cache Go files [Go] - uses: actions/cache@v3 - if: matrix.project != 'ui' && steps.filter.outputs.go == 'true' && github.event.pull_request.draft == false - with: - path: ~/go/pkg/mod - key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} - - name: Get Go dependencies [Go] if: matrix.project != 'ui' && steps.filter.outputs.go == 'true' && github.event.pull_request.draft == false working-directory: ${{ matrix.project }} @@ -90,38 +90,40 @@ jobs: exit 1 fi - - name: Unit test [Go] - if: matrix.project != 'ui' && steps.filter.outputs.go == 'true' && github.event.pull_request.draft == false - working-directory: ${{ matrix.project }} - run: go test -v ${{ matrix.extra_args }} ./... - - - name: Go build [Go] - if: matrix.project != 'ui' && steps.filter.outputs.go == 'true' && github.event.pull_request.draft == false - working-directory: ${{ matrix.project }} - run: go build -v ${{ matrix.extra_args }} ./... - - name: Code format [Go] run: if [ "$(gofmt -s -l . | wc -l)" -gt 0 ]; then exit 1; fi if: matrix.os == 'ubuntu-latest' && github.event.pull_request.draft == false - name: Code linting [Go] if: matrix.project != 'ui' && steps.filter.outputs.go == 'true' && github.event.pull_request.draft == false - uses: golangci/golangci-lint-action@v3 + uses: golangci/golangci-lint-action@v9 with: working-directory: ${{ matrix.project }} - version: v1.53.3 - args: ${{ matrix.lint_args }} ./... - skip-pkg-cache: true + version: v2.1.6 + args: --timeout 2m ${{ matrix.lint_args }} ./... + skip-cache: true + + - name: Unit test [Go] + if: matrix.project != 'ui' && steps.filter.outputs.go == 'true' && github.event.pull_request.draft == false + working-directory: ${{ matrix.project }} + run: go test ${{ matrix.extra_args }} -timeout 25m ./... 
+ env: + TESTCONTAINERS_RYUK_DISABLED: true + + - name: Go build [Go] + if: matrix.project != 'ui' && matrix.project != 'tests' && steps.filter.outputs.go == 'true' && github.event.pull_request.draft == false + working-directory: ${{ matrix.project }} + run: go build -v ${{ matrix.extra_args }} ./... - name: Set up Node.JS 21.4.0 [UI] if: matrix.project == 'ui' && steps.filter.outputs.ui == 'true' && github.event.pull_request.draft == false - uses: actions/setup-node@v4 + uses: actions/setup-node@v6.2.0 with: node-version: "21.4.0" - name: Cache node modules [UI] if: matrix.project == 'ui' && steps.filter.outputs.ui == 'true' && github.event.pull_request.draft == false - uses: actions/cache@v3 + uses: actions/cache@v5 with: path: ui/node_modules key: ${{ runner.OS }}-ui-${{ hashFiles('**/package-lock.json') }} @@ -140,3 +142,8 @@ jobs: if: matrix.project == 'ui' && steps.filter.outputs.ui == 'true' && github.event.pull_request.draft == false working-directory: ${{ matrix.project }} run: npm run lint + + - name: Build [UI] + if: matrix.project == 'ui' && steps.filter.outputs.ui == 'true' && github.event.pull_request.draft == false + working-directory: ${{ matrix.project }} + run: npm run build diff --git a/.gitignore b/.gitignore index c6ed67f52f6..1eaa3bd2f32 100644 --- a/.gitignore +++ b/.gitignore @@ -1,18 +1,28 @@ +.env.override +docker-compose.override.yml + *~ +*/tmp +*/node_modules +*.orig + api_private_key api_public_key ssh_private_key -/go.sum -/ui/node_modules/ -*.orig -.env.override -docker-compose.override.yml + +go.work +go.work.sum + +# Docker binaries +agent/agent api/api +cli/cli +gateway/gateway ssh/ssh -agent/agent + agent/shellhub.key -cli/cli -go.work -go.work.sum -connector/.keys -connector/connector + +# Directory used by devscripts/run-agent to store binaries +bin/agent + +agent/latest diff --git a/.golangci.yaml b/.golangci.yaml index ce887d97e6c..4cab4ec797d 100644 --- a/.golangci.yaml +++ b/.golangci.yaml @@ -1,48 +1,61 @@ +version: "2" + 
output: - format: colored-line-number - print-issued-lines: true - print-linter-name: true + formats: + text: + print-issued-lines: true + print-linter-name: true + +issues: uniq-by-line: true - sort-results: true + +formatters: + enable: + - gofumpt + - goimports linters: - disable-all: true + disable: + - errcheck enable: - - gofmt - revive - - gosimple - govet - ineffassign - - exportloopref + - copyloopvar - staticcheck - unconvert - unused - misspell - - goimports - - typecheck - - errcheck - whitespace - depguard - - stylecheck - nlreturn - nilerr + - gosec -linters-settings: - staticcheck: - checks: ["all", "-SA1029"] - depguard: - rules: - all: - deny: - - pkg: io/ioutil - desc: 'io/ioutil package has been deprecated' - - pkg: github.com/google/uuid - desc: "Uuid generation is only allowed using 'github.com/shellhub-io/shellhub/pkg/uuid'" - - pkg: github.com/dgrijalva/jwt-go - desc: "dgrijalva/jwt-go is deprecated please use 'github.com/golang-jwt/jwt'" - whitespace: - multi-if: true - multi-func: true - govet: - disable: - - composites + settings: + gosec: + excludes: + - G104 + - G301 + - G302 + - G304 + revive: + severity: warning + staticcheck: + checks: ["all", "-SA1029", "-ST1020", "-ST1021", "-ST1022", "-ST1000", "-QF1008"] + depguard: + rules: + all: + deny: + - pkg: io/ioutil + desc: 'io/ioutil package has been deprecated' + - pkg: github.com/google/uuid + desc: "Uuid generation is only allowed using 'github.com/shellhub-io/shellhub/pkg/uuid'" + - pkg: github.com/dgrijalva/jwt-go + desc: "dgrijalva/jwt-go is deprecated please use 'github.com/golang-jwt/jwt'" + whitespace: + multi-if: true + multi-func: true + govet: + disable: + - composites diff --git a/Makefile b/Makefile index bf0985bb21b..32a623be35a 100644 --- a/Makefile +++ b/Makefile @@ -18,7 +18,7 @@ export COMPOSE_TEMPLATE # Generate required private key for api service api_private_key: - @$(KEYGEN) genrsa -out api_private_key 2048 + @$(KEYGEN) genpkey -algorithm RSA -out api_private_key 
-pkeyopt rsa_keygen_bits:2048 # Generate required public key for api service api_public_key: @@ -26,7 +26,7 @@ api_public_key: # Generate required private key for ssh service ssh_private_key: - @$(KEYGEN) genrsa -out ssh_private_key 2048 + @$(KEYGEN) genpkey -algorithm RSA -out ssh_private_key -pkeyopt rsa_keygen_bits:2048 .PHONY: keygen # Generate required keys diff --git a/agent/.air.toml b/agent/.air.toml new file mode 100644 index 00000000000..2c283231435 --- /dev/null +++ b/agent/.air.toml @@ -0,0 +1,32 @@ +root = "../" +tmp_dir = "tmp" + +[build] +pre_cmd = [] +cmd = "go build -tags docker -ldflags \"-X main.AgentVersion=latest\" -gcflags=\"all=-N -l\" -o ./tmp/main ." +post_cmd = [] +bin = "" +full_bin = "dlv exec ./tmp/main" +args_bin = [ + "--listen=0.0.0.0:2345", + "--headless", + "--continue", + "--accept-multiclient", +] +delay = 500 +exclude_dir = ["assets", "tmp", "vendor", "testdata"] +exclude_file = [] +exclude_regex = ["_test.go"] +exclude_unchanged = false +follow_symlink = false +include_dir = [] +include_ext = ["go", "tpl", "tmpl", "html"] +include_file = [] +kill_delay = "0s" +log = "build-errors.log" +poll = false +poll_interval = 0 +rerun = false +rerun_delay = 500 +send_interrupt = false +stop_on_error = false diff --git a/agent/Dockerfile b/agent/Dockerfile index 238f5365206..eda91952018 100644 --- a/agent/Dockerfile +++ b/agent/Dockerfile @@ -1,18 +1,19 @@ -# base stage -FROM golang:1.20.4-alpine3.16 AS base +# Unified Dockerfile for ShellHub Agent +# Default build: production multiarch +# Development: use --target development -ARG GOPROXY +ARG GOLANG_VERSION=1.24 +ARG ALPINE_VERSION=3.22 -RUN apk add --update git ca-certificates build-base bash util-linux setpriv perl xz +# Base stage for both development and production +FROM --platform=$BUILDPLATFORM golang:${GOLANG_VERSION:-1.24}-alpine${ALPINE_VERSION:-3.22} AS base -# We are using libxcrypt to support yescrypt password hashing method -# Since libxcrypt package is not available in 
Alpine, so we need to build libxcrypt from source code -RUN wget -q https://github.com/besser82/libxcrypt/releases/download/v4.4.27/libxcrypt-4.4.27.tar.xz && \ - tar xvf libxcrypt-4.4.27.tar.xz && cd libxcrypt-4.4.27 && \ - ./configure --prefix /usr && make -j$(nproc) && make install && \ - cd .. && rm -rf libxcrypt-4.4.27* +ARG GOPROXY +ARG TARGETARCH +ARG TARGETOS=linux -RUN ln -sf /bin/bash /bin/sh +# Install Go BUILD architecture dependencies +RUN apk add --no-cache ca-certificates WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub @@ -24,11 +25,13 @@ COPY ./agent/go.mod ./agent/go.sum ./ RUN go mod download -# builder stage +# Builder stage for production FROM base AS builder ARG SHELLHUB_VERSION=latest ARG GOPROXY +ARG TARGETARCH +ARG TARGETOS=linux COPY ./pkg $GOPATH/src/github.com/shellhub-io/shellhub/pkg COPY ./agent . @@ -39,7 +42,14 @@ RUN go mod download WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub/agent -RUN go build -tags docker -ldflags "-X main.AgentVersion=${SHELLHUB_VERSION}" +# Cross-compile for target architecture +RUN CGO_ENABLED=0 GOOS=${TARGETOS} GOARCH=${TARGETARCH} go build -tags docker -ldflags "-s -w -X main.AgentVersion=${SHELLHUB_VERSION}" -o agent + +# Runtime utilities stage - CRITICAL: must use target platform +FROM --platform=$TARGETPLATFORM alpine:${ALPINE_VERSION:-3.22} AS runtime-utils + +# Install runtime binaries for the TARGET architecture +RUN apk add --no-cache util-linux setpriv ca-certificates # development stage FROM base AS development @@ -47,9 +57,10 @@ FROM base AS development ARG GOPROXY ENV GOPROXY ${GOPROXY} -RUN apk add --update openssl openssh-client -RUN go install github.com/markbates/refresh@v1.11.1 && \ - go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.53.3 +RUN apk add --update openssl openssh-client util-linux setpriv +RUN go install github.com/air-verse/air@v1.62 && \ + go install github.com/go-delve/delve/cmd/dlv@v1.25 && \ + go install 
github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.1.6 WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub @@ -63,10 +74,25 @@ WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub/agent ENTRYPOINT ["/entrypoint.sh"] -# production stage -FROM alpine:3.19.0 AS production +# DEFAULT STAGE: Production multiarch build (scratch-based) +FROM scratch + +ARG TARGETARCH + +# Copy CA certificates from runtime-utils +COPY --from=runtime-utils /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ + +# Copy required utilities from runtime-utils - NOT from builder +COPY --from=runtime-utils /usr/bin/nsenter /usr/bin/ +COPY --from=runtime-utils /bin/setpriv /bin/ + +# Copy shared libraries from runtime-utils +COPY --from=runtime-utils /usr/lib/libcap-ng.so.* /usr/lib/ + +# Copy musl loader from runtime-utils +COPY --from=runtime-utils /lib/ld-musl-*.so.1 /lib/ -WORKDIR /app -COPY --from=builder /agent /app/ +# Copy the agent binary +COPY --from=builder /go/src/github.com/shellhub-io/shellhub/agent/agent /bin/agent -ENTRYPOINT ./agent +ENTRYPOINT ["/bin/agent"] \ No newline at end of file diff --git a/agent/Dockerfile.amd64 b/agent/Dockerfile.amd64 deleted file mode 100644 index e2f4b99c355..00000000000 --- a/agent/Dockerfile.amd64 +++ /dev/null @@ -1,43 +0,0 @@ -FROM golang:1.20.4-alpine3.16 - -ARG SHELLHUB_VERSION=latest - -RUN apk add --update git ca-certificates util-linux build-base bash setpriv perl xz - -# We are using libxcrypt to support yescrypt password hashing method -# Since libxcrypt package is not available in Alpine, so we need to build libxcrypt from source code -RUN wget -q https://github.com/besser82/libxcrypt/releases/download/v4.4.27/libxcrypt-4.4.27.tar.xz && \ - tar xvf libxcrypt-4.4.27.tar.xz && cd libxcrypt-4.4.27 && \ - ./configure --prefix /usr && make -j$(nproc) && make install && \ - cd .. 
&& rm -rf libxcrypt-4.4.27* - -WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub - -COPY ./go.mod ./ - -WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub/agent - -COPY ./agent/go.mod ./agent/go.sum ./ - -RUN go mod download - -COPY ./pkg $GOPATH/src/github.com/shellhub-io/shellhub/pkg -COPY ./agent . - -RUN go mod download - -WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub/agent - -RUN GOOS=linux GOARCH=amd64 go build -tags docker -ldflags "-X main.AgentVersion=${SHELLHUB_VERSION}" - -FROM scratch - -COPY --from=0 /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ca-certificates.crt -COPY --from=0 /usr/bin/nsenter /usr/bin/ -COPY --from=0 /usr/bin/setpriv /usr/bin/ -COPY --from=0 /usr/lib/libcap-ng.so.* /usr/lib/ -COPY --from=0 /lib/ld-musl-x86_64.so.1 /lib/ -COPY --from=0 /usr/lib/libcrypt.so* /usr/lib/ -COPY --from=0 /go/src/github.com/shellhub-io/shellhub/agent/agent /bin/agent - -ENTRYPOINT ["/bin/agent"] diff --git a/agent/Dockerfile.arm32v6 b/agent/Dockerfile.arm32v6 deleted file mode 100644 index b74e2ab5cf5..00000000000 --- a/agent/Dockerfile.arm32v6 +++ /dev/null @@ -1,45 +0,0 @@ -# docker run --rm --privileged multiarch/qemu-user-static --reset -p yes - -FROM arm32v6/golang:1.20.4-alpine3.16 - -ARG SHELLHUB_VERSION=latest - -RUN apk add --update git ca-certificates util-linux build-base bash setpriv perl xz - -# We are using libxcrypt to support yescrypt password hashing method -# Since libxcrypt package is not available in Alpine, so we need to build libxcrypt from source code -RUN wget -q https://github.com/besser82/libxcrypt/releases/download/v4.4.27/libxcrypt-4.4.27.tar.xz && \ - tar xvf libxcrypt-4.4.27.tar.xz && cd libxcrypt-4.4.27 && \ - ./configure --prefix /usr && make -j$(nproc) && make install && \ - cd .. 
&& rm -rf libxcrypt-4.4.27* - -WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub - -COPY ./go.mod ./ - -WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub/agent - -COPY ./agent/go.mod ./agent/go.sum ./ - -RUN go mod download - -COPY ./pkg $GOPATH/src/github.com/shellhub-io/shellhub/pkg -COPY ./agent . - -RUN go mod download - -WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub/agent - -RUN GOOS=linux GOARCH=arm go build -tags docker -ldflags "-X main.AgentVersion=${SHELLHUB_VERSION}" - -FROM scratch - -COPY --from=0 /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ca-certificates.crt -COPY --from=0 /usr/bin/nsenter /usr/bin/ -COPY --from=0 /usr/bin/setpriv /usr/bin/ -COPY --from=0 /usr/lib/libcap-ng.so.* /usr/lib/ -COPY --from=0 /lib/ld-musl-armhf.so.1 /lib/ -COPY --from=0 /usr/lib/libcrypt.so* /usr/lib/ -COPY --from=0 /go/src/github.com/shellhub-io/shellhub/agent/agent /bin/agent - -ENTRYPOINT ["/bin/agent"] diff --git a/agent/Dockerfile.arm32v7 b/agent/Dockerfile.arm32v7 deleted file mode 100644 index b17ee288ad8..00000000000 --- a/agent/Dockerfile.arm32v7 +++ /dev/null @@ -1,45 +0,0 @@ -# docker run --rm --privileged multiarch/qemu-user-static --reset -p yes - -FROM arm32v7/golang:1.20.4-alpine3.16 - -ARG SHELLHUB_VERSION=latest - -RUN apk add --update git ca-certificates util-linux build-base bash setpriv perl xz - -# We are using libxcrypt to support yescrypt password hashing method -# Since libxcrypt package is not available in Alpine, so we need to build libxcrypt from source code -RUN wget -q https://github.com/besser82/libxcrypt/releases/download/v4.4.27/libxcrypt-4.4.27.tar.xz && \ - tar xvf libxcrypt-4.4.27.tar.xz && cd libxcrypt-4.4.27 && \ - ./configure --prefix /usr && make -j$(nproc) && make install && \ - cd .. 
&& rm -rf libxcrypt-4.4.27* - -WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub - -COPY ./go.mod ./ - -WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub/agent - -COPY ./agent/go.mod ./agent/go.sum ./ - -RUN go mod download - -COPY ./pkg $GOPATH/src/github.com/shellhub-io/shellhub/pkg -COPY ./agent . - -RUN go mod download - -WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub/agent - -RUN GOOS=linux GOARCH=arm go build -tags docker -ldflags "-X main.AgentVersion=${SHELLHUB_VERSION}" - -FROM scratch - -COPY --from=0 /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ca-certificates.crt -COPY --from=0 /usr/bin/nsenter /usr/bin/ -COPY --from=0 /usr/bin/setpriv /usr/bin/ -COPY --from=0 /usr/lib/libcap-ng.so.* /usr/lib/ -COPY --from=0 /lib/ld-musl-armhf.so.1 /lib/ -COPY --from=0 /usr/lib/libcrypt.so* /usr/lib/ -COPY --from=0 /go/src/github.com/shellhub-io/shellhub/agent/agent /bin/agent - -ENTRYPOINT ["/bin/agent"] diff --git a/agent/Dockerfile.arm64v8 b/agent/Dockerfile.arm64v8 deleted file mode 100644 index a00c2f483e3..00000000000 --- a/agent/Dockerfile.arm64v8 +++ /dev/null @@ -1,45 +0,0 @@ -# docker run --rm --privileged multiarch/qemu-user-static --reset -p yes - -FROM arm64v8/golang:1.20.4-alpine3.16 - -ARG SHELLHUB_VERSION=latest - -RUN apk add --update git ca-certificates util-linux build-base bash setpriv perl xz - -# We are using libxcrypt to support yescrypt password hashing method -# Since libxcrypt package is not available in Alpine, so we need to build libxcrypt from source code -RUN wget -q https://github.com/besser82/libxcrypt/releases/download/v4.4.27/libxcrypt-4.4.27.tar.xz && \ - tar xvf libxcrypt-4.4.27.tar.xz && cd libxcrypt-4.4.27 && \ - ./configure --prefix /usr && make -j$(nproc) && make install && \ - cd .. 
&& rm -rf libxcrypt-4.4.27* - -WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub - -COPY ./go.mod ./ - -WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub/agent - -COPY ./agent/go.mod ./agent/go.sum ./ - -RUN go mod download - -COPY ./pkg $GOPATH/src/github.com/shellhub-io/shellhub/pkg -COPY ./agent . - -RUN go mod download - -WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub/agent - -RUN GOOS=linux GOARCH=arm64 go build -tags docker -ldflags "-X main.AgentVersion=${SHELLHUB_VERSION}" - -FROM scratch - -COPY --from=0 /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ca-certificates.crt -COPY --from=0 /usr/bin/nsenter /usr/bin/ -COPY --from=0 /usr/bin/setpriv /usr/bin/ -COPY --from=0 /usr/lib/libcap-ng.so.* /usr/lib/ -COPY --from=0 /lib/ld-musl-aarch64.so.1 /lib/ -COPY --from=0 /usr/lib/libcrypt.so* /usr/lib/ -COPY --from=0 /go/src/github.com/shellhub-io/shellhub/agent/agent /bin/agent - -ENTRYPOINT ["/bin/agent"] diff --git a/agent/Dockerfile.i386 b/agent/Dockerfile.i386 deleted file mode 100644 index 5811d645406..00000000000 --- a/agent/Dockerfile.i386 +++ /dev/null @@ -1,43 +0,0 @@ -FROM golang:1.20.4-alpine3.16 - -ARG SHELLHUB_VERSION=latest - -RUN apk add --update git ca-certificates util-linux build-base bash setpriv perl xz - -# We are using libxcrypt to support yescrypt password hashing method -# Since libxcrypt package is not available in Alpine, so we need to build libxcrypt from source code -RUN wget -q https://github.com/besser82/libxcrypt/releases/download/v4.4.27/libxcrypt-4.4.27.tar.xz && \ - tar xvf libxcrypt-4.4.27.tar.xz && cd libxcrypt-4.4.27 && \ - ./configure --prefix /usr && make -j$(nproc) && make install && \ - cd .. && rm -rf libxcrypt-4.4.27* - -WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub - -COPY ./go.mod ./ - -WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub/agent - -COPY ./agent/go.mod ./agent/go.sum ./ - -RUN go mod download - -COPY ./pkg $GOPATH/src/github.com/shellhub-io/shellhub/pkg -COPY ./agent . 
- -RUN go mod download - -WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub/agent - -RUN GOOS=linux GOARCH=386 go build -tags docker -ldflags "-X main.AgentVersion=${SHELLHUB_VERSION}" - -FROM scratch - -COPY --from=0 /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ca-certificates.crt -COPY --from=0 /usr/bin/nsenter /usr/bin/ -COPY --from=0 /usr/bin/setpriv /usr/bin/ -COPY --from=0 /usr/lib/libcap-ng.so.* /usr/lib/ -COPY --from=0 /lib/ld-musl-*.so.1 /lib/ -COPY --from=0 /usr/lib/libcrypt.so* /usr/lib/ -COPY --from=0 /go/src/github.com/shellhub-io/shellhub/agent/agent /bin/agent - -ENTRYPOINT ["/bin/agent"] diff --git a/agent/Dockerfile.test b/agent/Dockerfile.test new file mode 100644 index 00000000000..7d91b39fc37 --- /dev/null +++ b/agent/Dockerfile.test @@ -0,0 +1,38 @@ +FROM golang:1.24-alpine3.22 + +ARG GOPROXY + +RUN apk add --update ca-certificates util-linux setpriv bash openssh + +RUN ln -sf /bin/bash /bin/sh + +WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub + +COPY ./go.mod ./ + +WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub/agent + +COPY ./agent/go.mod ./agent/go.sum ./ + +RUN go mod download + +ARG SHELLHUB_VERSION=latest +ARG GOPROXY + +COPY ./pkg $GOPATH/src/github.com/shellhub-io/shellhub/pkg +COPY ./agent . + +WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub + +RUN go mod download + +WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub/agent + +RUN go build -ldflags "-X main.AgentVersion=${SHELLHUB_VERSION}" + +ARG USERNAME +ARG PASSWORD + +RUN echo "${USERNAME}:${PASSWORD}" | chpasswd + +ENTRYPOINT ["./agent"] diff --git a/agent/agent.go b/agent/agent.go new file mode 100644 index 00000000000..d97f4bec07b --- /dev/null +++ b/agent/agent.go @@ -0,0 +1,642 @@ +// Package agent provides packages and functions to create a new ShellHub Agent instance. +// +// The ShellHub Agent is a lightweight software component that runs the device and provide communication between the +// device and ShellHub's server. 
Its main role is to provide a reserve SSH server always connected to the ShellHub +// server, allowing SSH connections to be established to the device even when it is behind a firewall or NAT. +// +// This package provides a simple API to create a new agent instance and start the communication with the server. The +// agent will automatically connect to the server and start listening for incoming connections. Once connected, the +// agent will also automatically reconnect to the server if the connection is lost. +// +// The update process isn't handled by this package. This feature is provided by its main implementation in +// [ShellHub Agent]. Check the [ShellHub Agent] documentation for more information. +// +// # Example: +// +// Creates the agent configuration with the minimum required fields: +// +// func main() { +// cfg := Config{ +// ServerAddress: "http://localhost:80", +// TenantID: "00000000-0000-4000-0000-000000000000", +// PrivateKey: "/tmp/shellhub.key", +// } +// +// ctx := context.Background() +// ag, err := NewAgentWithConfig(&cfg) +// if err != nil { +// panic(err) +// } +// +// if err := ag.Initialize(); err != nil { +// panic(err) +// } +// +// ag.Listen(ctx) +// } +// +// [ShellHub Agent]: https://github.com/shellhub-io/shellhub/tree/master/agent +package main + +import ( + "context" + "crypto/rsa" + "fmt" + "math/rand" + "net" + "net/url" + "os" + "runtime" + "strings" + "sync/atomic" + "time" + + "github.com/Masterminds/semver" + "github.com/pkg/errors" + "github.com/shellhub-io/shellhub/agent/pkg/keygen" + "github.com/shellhub-io/shellhub/agent/pkg/sysinfo" + "github.com/shellhub-io/shellhub/agent/pkg/tunnel" + "github.com/shellhub-io/shellhub/agent/server" + "github.com/shellhub-io/shellhub/pkg/api/client" + "github.com/shellhub-io/shellhub/pkg/envs" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/shellhub-io/shellhub/pkg/validator" + log "github.com/sirupsen/logrus" +) + +// Config provides the configuration for the agent 
service. +type Config struct { + // Set the ShellHub Cloud server address the agent will use to connect. + // This is required. + ServerAddress string `env:"SERVER_ADDRESS,required" validate:"required"` + + // Specify the path to the device private key. + // If not provided, the agent will generate a new one. + // This is required. + PrivateKey string `env:"PRIVATE_KEY,required" validate:"required"` + + // Sets the account tenant id used during communication to associate the + // device to a specific tenant. + // This is required. + TenantID string `env:"TENANT_ID,required" validate:"required"` + + // Determine the interval to send the keep alive message to the server. This + // has a direct impact of the bandwidth used by the device when in idle + // state. Default is 30 seconds. + KeepAliveInterval uint32 `env:"KEEPALIVE_INTERVAL,overwrite,default=30"` + + // Set the device preferred hostname. This provides a hint to the server to + // use this as hostname if it is available. + PreferredHostname string `env:"PREFERRED_HOSTNAME"` + + // Set the device preferred identity. This provides a hint to the server to + // use this identity if it is available. + PreferredIdentity string `env:"PREFERRED_IDENTITY,default="` + + // Stores the password for single-user mode (without root privileges). If not + // provided, multi-user mode (with root privileges) is enabled by default. + // NOTE: The password hash could be generated by ```openssl passwd```. + SingleUserPassword string `env:"SINGLE_USER_PASSWORD,default=$SIMPLE_USER_PASSWORD"` + + // SimpleUserPassword exists due to a typo on the environmental variable that stores the password for single user + // mode that was wrongly named `SIMPLE_USER_PASSWORD` instead of `SINGLE_USER_PASSWORD`, and willing to keep the + // compatibility, this new variable was created. 
+ SimpleUserPassword string `env:"SIMPLE_USER_PASSWORD"` + + // MaxRetryConnectionTimeout specifies the maximum time, in seconds, that an agent will wait + // before attempting to reconnect to the ShellHub server. Default is 60 seconds. + MaxRetryConnectionTimeout int `env:"MAX_RETRY_CONNECTION_TIMEOUT,default=60" validate:"min=10,max=120"` + + // TransportVersion specifies the version of the agent transport protocol to use. + // Version 1 uses HTTP-based revdial, version 2 uses yamux multiplexing with multistream. + // Supported values are 1 and 2. Default is 1. + TransportVersion int `env:"TRANSPORT_VERSION,default=1"` +} + +func LoadConfigFromEnv() (*Config, map[string]interface{}, error) { + // NOTE(r): When T, the generic parameter, is a structure with required tag, the fallback for an + // "unprefixed" parameter is used. + // + // For example, + // + // For the structure below, the parser will parse successfully when the variables exist with or without the + // prefixes since the "required" tag is set to true. + // + // SHELLHUB_TENANT_ID=00000000-0000-4000-0000-000000000000 SERVER_ADDRESS=http://127.0.0.1 + // PRIVATE_KEY=/tmp/shellhub sudo -E ./agent + // + // struct { + // ServerAddress string `env:"SERVER_ADDRESS,required"` + // PrivateKey string `env:"PRIVATE_KEY,required"` + // TenantID string `env:"TENANT_ID,required` + // } + // + // This behavior is driven by the [envconfig] package. Check it out for more information. + // + // [envconfig]: https://github.com/sethvargo/go-envconfig + cfg, err := envs.ParseWithPrefix[Config]("SHELLHUB_") + if err != nil { + log.Error("failed to parse the configuration") + + return nil, nil, err + } + + // TODO: test the envinromental variables validation on integration tests. 
+ if ok, fields, err := validator.New().StructWithFields(cfg); err != nil || !ok { + log.WithFields(fields).Error("failed to validate the configuration loaded from envs") + + return nil, fields, err + } + + return cfg, nil, nil +} + +type Agent struct { + config *Config + pubKey *rsa.PublicKey + Identity *models.DeviceIdentity + Info *models.DeviceInfo + authData *models.DeviceAuthResponse + cli client.Client + serverInfo *models.Info + server *server.Server + // TODO: Listening channel could be removed in favor of a better approach. + listening chan bool + closed atomic.Bool + mode Mode + // listener is the current connection to the server. + listener atomic.Pointer[net.Listener] + // logger is the agent's logger instance. + logger *log.Entry +} + +// NewAgent creates a new agent instance, requiring the ShellHub server's address to connect to, the namespace's tenant +// where device own and the path to the private key on the file system. +// +// To create a new [Agent] instance with all configurations, you can use [NewAgentWithConfig]. +func NewAgent(address string, tenantID string, privateKey string, mode Mode) (*Agent, error) { + return NewAgentWithConfig(&Config{ + ServerAddress: address, + TenantID: tenantID, + PrivateKey: privateKey, + }, mode) +} + +var ( + ErrNewAgentWithConfigEmptyServerAddress = errors.New("address is empty") + ErrNewAgentWithConfigInvalidServerAddress = errors.New("address is invalid") + ErrNewAgentWithConfigEmptyTenant = errors.New("tenant is empty") + ErrNewAgentWithConfigEmptyPrivateKey = errors.New("private key is empty") + ErrNewAgentWithConfigNilMode = errors.New("agent's mode is nil") +) + +// NewAgentWithConfig creates a new agent instance with all configurations. +// +// Check [Config] for more information. 
+func NewAgentWithConfig(config *Config, mode Mode) (*Agent, error) { + if config.ServerAddress == "" { + return nil, ErrNewAgentWithConfigEmptyServerAddress + } + + if _, err := url.ParseRequestURI(config.ServerAddress); err != nil { + return nil, ErrNewAgentWithConfigInvalidServerAddress + } + + if config.TenantID == "" { + return nil, ErrNewAgentWithConfigEmptyTenant + } + + if config.PrivateKey == "" { + return nil, ErrNewAgentWithConfigEmptyPrivateKey + } + + if mode == nil { + return nil, ErrNewAgentWithConfigNilMode + } + + return &Agent{ + config: config, + mode: mode, + }, nil +} + +// Initialize initializes the ShellHub Agent, generating device identity, loading device information, generating private +// key, reading public key, probing server information and authorizing device on ShellHub server. +// +// When any of the steps fails, the agent will return an error, and the agent will not be able to start. +func (a *Agent) Initialize() error { + var err error + + a.cli, err = client.NewClient(a.config.ServerAddress, client.WithVersion(AgentVersion)) + if err != nil { + return errors.Wrap(err, "failed to create the HTTP client") + } + + if err := a.generateDeviceIdentity(); err != nil { + return errors.Wrap(err, "failed to generate device identity") + } + + if err := a.loadDeviceInfo(); err != nil { + return errors.Wrap(err, "failed to load device info") + } + + if err := a.generatePrivateKey(); err != nil { + return errors.Wrap(err, "failed to generate private key") + } + + if err := a.readPublicKey(); err != nil { + return errors.Wrap(err, "failed to read public key") + } + + if err := a.probeServerInfo(); err != nil { + return errors.Wrap(err, "failed to probe server info") + } + + if err := a.authorize(); err != nil { + return errors.Wrap(err, "failed to authorize device") + } + + a.closed.Store(false) + + a.logger = log.WithFields(log.Fields{ + "version": AgentVersion, + "tenant_id": a.authData.Namespace, + "server_address": a.config.ServerAddress, + 
"ssh_endpoint": a.serverInfo.Endpoints.SSH, + "api_endpoint": a.serverInfo.Endpoints.API, + "transport_version": a.config.TransportVersion, + "sshid": fmt.Sprintf("%s.%s@%s", a.authData.Namespace, a.authData.Name, strings.Split(a.serverInfo.Endpoints.SSH, ":")[0]), + }) + + return nil +} + +// generatePrivateKey generates a new private key if it doesn't exist on the filesystem. +func (a *Agent) generatePrivateKey() error { + if _, err := os.Stat(a.config.PrivateKey); os.IsNotExist(err) { + if err := keygen.GeneratePrivateKey(a.config.PrivateKey); err != nil { + return err + } + } + + return nil +} + +func (a *Agent) readPublicKey() error { + key, err := keygen.ReadPublicKey(a.config.PrivateKey) + a.pubKey = key + + return err +} + +// generateDeviceIdentity generates a device identity. +// +// The default value for Agent Identity is a network interface MAC address, but if the `SHELLHUB_PREFERRED_IDENTITY` is +// defined and set on [Config] structure, the device identity is set to this value. +func (a *Agent) generateDeviceIdentity() error { + if id := a.config.PreferredIdentity; id != "" { + a.Identity = &models.DeviceIdentity{ + MAC: id, + } + + return nil + } + + // get identity from network interface. + iface, err := sysinfo.PrimaryInterface() + if err != nil { + return err + } + + a.Identity = &models.DeviceIdentity{ + MAC: iface.HardwareAddr.String(), + } + + return nil +} + +// loadDeviceInfo load some device informations like OS name, version, arch and platform. +func (a *Agent) loadDeviceInfo() error { + info, err := a.mode.GetInfo() + if err != nil { + return err + } + + a.Info = &models.DeviceInfo{ + ID: info.ID, + PrettyName: info.Name, + Version: AgentVersion, + Platform: AgentPlatform, + Arch: runtime.GOARCH, + } + + return nil +} + +// probeServerInfo gets information about the ShellHub server. 
+func (a *Agent) probeServerInfo() error { + info, err := a.cli.GetInfo(AgentVersion) + a.serverInfo = info + + return err +} + +var ErrNoIdentityAndHostname = errors.New("the device doesn't have a valid hostname and identity. Set PREFERRED_IDENTITY or PREFERRED_HOSTNAME to specify the device's name and identity") + +// authorize send auth request to the server with device information in order to register it in the namespace. +func (a *Agent) authorize() error { + req := &models.DeviceAuthRequest{ + Info: a.Info, + DeviceAuth: &models.DeviceAuth{ + Hostname: a.config.PreferredHostname, + Identity: a.Identity, + TenantID: a.config.TenantID, + PublicKey: string(keygen.EncodePublicKeyToPem(a.pubKey)), + }, + } + + // NOTE: A MAC address can be empty when the network interface used to communicate with the external world isn't a + // physical one. In this case, we should be able to define a custom value for MAC's field using the + // [PREFERRED_IDENTITY] variable. If the hostname is also empty, [PREFERRED_HOSTNAME] could be defined to provide a + // fallback identifier for the device. This ensures that even if both the MAC address and hostname are missing, we + // have a way to identify the device uniquely. When it occurs, and no variable was defined, the agent should fail to + // initialize. + if req.DeviceAuth.Hostname == "" && (req.DeviceAuth.Identity == nil || req.DeviceAuth.Identity.MAC == "") { + return ErrNoIdentityAndHostname + } + + data, err := a.cli.AuthDevice(req) + if err != nil { + return err + } + + a.authData = data + + return err +} + +func (a *Agent) isClosed() bool { + return a.closed.Load() +} + +// Close closes the ShellHub Agent's listening, stoping it from receive new connection requests. 
+func (a *Agent) Close() error { + a.closed.Store(true) + + l := a.listener.Load() + if l == nil { + return nil + } + + return (*l).Close() +} + +const ( + TransportV1 = 1 + TransportV2 = 2 +) + +func (a *Agent) Listen(ctx context.Context) error { + a.mode.Serve(a) + + switch a.config.TransportVersion { + case TransportV1: + return a.listenV1(ctx) + case TransportV2: + return a.listenV2(ctx) + default: + return fmt.Errorf("unsupported transport version: %d", a.config.TransportVersion) + } +} + +func (a *Agent) listenV1(ctx context.Context) error { + // NOTE: ListenV1 exists to separte the logic between tunnel versions. When tunnel v1 is deprecated, this function + // can be removed and its logic moved to [Listen]. + tun := tunnel.NewTunnelV1() + + tun.Handle(HandleSSHOpenV1, sshHandlerV1(a)) + tun.Handle(HandleSSHCloseV1, sshCloseHandlerV1(a)) + tun.Handle(HandleHTTPProxyV1, httpProxyHandlerV1(a)) + + go a.ping(ctx, AgentPingDefaultInterval) //nolint:errcheck + + ctx, cancel := context.WithCancel(ctx) + go func() { + for { + if a.isClosed() { + a.logger.Info("Stopped listening for connections") + + cancel() + + return + } + + // TODO: As this path isn't meant to be changed, it could be moved to the [NewReverseListenerV1] function. + ShellHubConnectV1Path := "/ssh/connection" + + a.logger.Debug("Using tunnel version 1") + + listener, err := a.cli.NewReverseListenerV1( + ctx, + a.authData.Token, + ShellHubConnectV1Path, + ) + if err != nil { + a.logger.Error("Failed to connect to server through reverse tunnel. 
Retry in 10 seconds") + + time.Sleep(time.Second * 10) + + continue + } + a.listener.Store(&listener) + + a.logger.Info("Server connection established") + + a.listening <- true + + if err := tun.Listen(ctx, listener); err != nil { + a.logger.WithError(err).Error("Tunnel listener exited with error") + } + + a.listening <- false + } + }() + + <-ctx.Done() + + return a.Close() +} + +func (a *Agent) listenV2(ctx context.Context) error { + // NOTE: ListenV2 exists to separte the logic between tunnel versions. When tunnel v1 is deprecated, this function + // can be removed and its logic moved to [Listen]. + tun := tunnel.NewTunnelV2(a.cli) + + tun.Handle(HandleSSHOpenV2, sshHandlerV2(a)) + tun.Handle(HandleSSHCloseV2, sshCloseHandlerV2(a)) + tun.Handle(HandleHTTPProxyV2, httpProxyHandlerV2(a)) + + go a.ping(ctx, AgentPingDefaultInterval) //nolint:errcheck + + ctx, cancel := context.WithCancel(ctx) + go func() { + for { + if a.isClosed() { + a.logger.Info("Stopped listening for connections") + + cancel() + + return + } + + // TODO: As this path isn't meant to be changed, it could be moved to the [NewReverseListenerV2] function. + ShellHubConnectV2Path := "/agent/connection" + + a.logger.Debug("Using tunnel version 2") + + listener, err := a.cli.NewReverseListenerV2( + ctx, + a.authData.Token, + ShellHubConnectV2Path, + client.NewReverseV2ConfigFromMap(a.authData.Config), + ) + if err != nil { + a.logger.Error("Failed to connect to server through reverse tunnel. Retry in 10 seconds") + + time.Sleep(time.Second * 10) + + continue + } + a.listener.Store(&listener) + + a.logger.Info("Server connection established") + + a.listening <- true + + if err := tun.Listen(ctx, listener); err != nil { + a.logger.WithError(err).Error("Tunnel listener exited with error") + } + + a.listening <- false + } + }() + + <-ctx.Done() + + return a.Close() +} + +// AgentPingDefaultInterval is the default time interval between ping on agent. 
+const AgentPingDefaultInterval = 10 * time.Minute + +// ping sends an authorization request to the ShellHub server at each interval. +// A random value between 10 and [config.MaxRetryConnectionTimeout] seconds is added to the interval +// each time the ticker is executed. +// +// Ping only sends requests to the server if the agent is listening for connections. If the agent is not +// listening, the ping process will be stopped. When the interval is 0, the default value is 10 minutes. +func (a *Agent) ping(ctx context.Context, interval time.Duration) error { + a.listening = make(chan bool) + + if interval == 0 { + interval = AgentPingDefaultInterval + } + + <-a.listening // NOTE: wait for the first connection to start to ping the server. + ticker := time.NewTicker(interval) + + for { + if a.isClosed() { + return nil + } + + select { + case <-ctx.Done(): + log.WithFields(log.Fields{ + "version": AgentVersion, + "tenant_id": a.authData.Namespace, + "server_address": a.config.ServerAddress, + }).Debug("stopped pinging server due to context cancellation") + + return nil + case ok := <-a.listening: + if ok { + log.WithFields(log.Fields{ + "version": AgentVersion, + "tenant_id": a.authData.Namespace, + "server_address": a.config.ServerAddress, + "timestamp": time.Now(), + }).Debug("Starting the ping interval to server") + + ticker.Reset(interval) + } else { + log.WithFields(log.Fields{ + "version": AgentVersion, + "tenant_id": a.authData.Namespace, + "server_address": a.config.ServerAddress, + "timestamp": time.Now(), + }).Debug("Stopped pinging server due listener status") + + ticker.Stop() + } + case <-ticker.C: + if err := a.authorize(); err != nil { + a.server.SetDeviceName(a.authData.Name) + } + + log.WithFields(log.Fields{ + "version": AgentVersion, + "tenant_id": a.authData.Namespace, + "server_address": a.config.ServerAddress, + "name": a.authData.Name, + "hostname": a.config.PreferredHostname, + "identity": a.config.PreferredIdentity, + "timestamp": time.Now(), 
+ }).Info("Ping")
+
+ randTimeout := time.Duration(rand.Intn(a.config.MaxRetryConnectionTimeout-10)+10) * time.Second //nolint:gosec
+ ticker.Reset(interval + randTimeout)
+ }
+ }
+}
+
+// CheckUpdate gets the ShellHub's server version.
+func (a *Agent) CheckUpdate() (*semver.Version, error) {
+ info, err := a.cli.GetInfo(AgentVersion)
+ if err != nil {
+ return nil, err
+ }
+
+ return semver.NewVersion(info.Version)
+}
+
+// GetInfo gets the ShellHub's server information like version and endpoints, and updates the Agent's server's info.
+func (a *Agent) GetInfo() (*models.Info, error) {
+ if a.serverInfo != nil {
+ return a.serverInfo, nil
+ }
+
+ info, err := a.cli.GetInfo(AgentVersion)
+ if err != nil {
+ return nil, err
+ }
+
+ a.serverInfo = info
+
+ return info, nil
+}
+
+// GetInfo gets information like the version and the endpoints for HTTP and SSH to the ShellHub server.
+func GetInfo(cfg *Config) (*models.Info, error) {
+ cli, err := client.NewClient(cfg.ServerAddress)
+ if err != nil {
+ return nil, errors.Wrap(err, "failed to create the HTTP client")
+ }
+
+ info, err := cli.GetInfo(AgentVersion)
+ if err != nil {
+ return nil, err
+ }
+
+ return info, nil
+}
diff --git a/agent/agent_test.go b/agent/agent_test.go
new file mode 100644
index 00000000000..4bad7d1902f
--- /dev/null
+++ b/agent/agent_test.go
@@ -0,0 +1,308 @@
+package main
+
+import (
+ "testing"
+
+ "github.com/pkg/errors"
+ client_mocks "github.com/shellhub-io/shellhub/pkg/api/client/mocks"
+ "github.com/shellhub-io/shellhub/pkg/envs"
+ env_mocks "github.com/shellhub-io/shellhub/pkg/envs/mocks"
+ "github.com/shellhub-io/shellhub/pkg/models"
+ "github.com/shellhub-io/shellhub/pkg/validator"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/mock"
+)
+
+func ExampleNewAgentWithConfig() {
+ _, err := NewAgentWithConfig(&Config{
+ ServerAddress: "http://localhost:80",
+ TenantID: "00000000-0000-4000-0000-000000000000",
+ PrivateKey: "./shellhub.key",
+ }, new(HostMode))
+ if err
!= nil { + panic(err) + } +} + +func ExampleNewAgent() { + _, err := NewAgent("http://localhost:80", "00000000-0000-4000-0000-000000000000", "./shellhub.key", new(HostMode)) + if err != nil { + panic(err) + } +} + +func TestLoadConfigFromEnv(t *testing.T) { + envMock := new(env_mocks.Backend) + envs.DefaultBackend = envMock + + type expected struct { + cfg *Config + fields map[string]interface{} + err error + } + + tests := []struct { + description string + requiredMocks func() + expected expected + }{ + { + description: "fail to load the environment variables when required ones are not set", + requiredMocks: func() { + envs := new(Config) + + envMock.On("Process", "SHELLHUB_", envs).Return(errors.New("")).Once() + }, + expected: expected{ + cfg: nil, + fields: nil, + err: envs.ErrParseWithPrefix, + }, + }, + { + description: "fail to load the environment variables when one required values is empty", + requiredMocks: func() { + envs := new(Config) + + envMock.On("Process", "SHELLHUB_", envs).Return(nil).Once().Run(func(args mock.Arguments) { + cfg := args.Get(1).(*Config) + + cfg.ServerAddress = "http://localhost" + cfg.TenantID = "" + cfg.PrivateKey = "" + cfg.MaxRetryConnectionTimeout = 30 + }) + }, + expected: expected{ + cfg: nil, + fields: map[string]interface{}{ + "TenantID": "required", + "PrivateKey": "required", + }, + err: validator.ErrStructureInvalid, + }, + }, + { + description: "fail to load the environment variables when required values are empty", + requiredMocks: func() { + envs := new(Config) + + envMock.On("Process", "SHELLHUB_", envs).Return(nil).Once().Run(func(args mock.Arguments) { + cfg := args.Get(1).(*Config) + + cfg.ServerAddress = "" + cfg.TenantID = "" + cfg.PrivateKey = "" + cfg.MaxRetryConnectionTimeout = 30 + }) + }, + expected: expected{ + cfg: nil, + fields: map[string]interface{}{ + "ServerAddress": "required", + "TenantID": "required", + "PrivateKey": "required", + }, + err: validator.ErrStructureInvalid, + }, + }, + { + 
description: "success to load the environmental variables", + requiredMocks: func() { + envs := new(Config) + + envMock.On("Process", "SHELLHUB_", envs).Return(nil).Once().Run(func(args mock.Arguments) { + cfg := args.Get(1).(*Config) + + cfg.ServerAddress = "http://localhost" + cfg.TenantID = "1c462afa-e4b6-41a5-ba54-7236a1770466" + cfg.PrivateKey = "/tmp/shellhub.key" + cfg.MaxRetryConnectionTimeout = 30 + }) + }, + expected: expected{ + cfg: &Config{ + ServerAddress: "http://localhost", + TenantID: "1c462afa-e4b6-41a5-ba54-7236a1770466", + PrivateKey: "/tmp/shellhub.key", + MaxRetryConnectionTimeout: 30, + }, + fields: nil, + err: nil, + }, + }, + } + + for _, test := range tests { + t.Run(test.description, func(t *testing.T) { + test.requiredMocks() + + cfg, fields, err := LoadConfigFromEnv() + assert.Equal(t, test.expected.cfg, cfg) + assert.Equal(t, test.expected.fields, fields) + assert.ErrorIs(t, err, test.expected.err) + }) + } +} + +func TestNewAgentWithConfig(t *testing.T) { + type expected struct { + agent *Agent + err error + } + + // NOTICE: configuration structure used by the successfully test. 
+ config := &Config{ + ServerAddress: "http://localhost", + TenantID: "1c462afa-e4b6-41a5-ba54-7236a1770466", + PrivateKey: "/tmp/shellhub.key", + } + + tests := []struct { + description string + config *Config + mode Mode + expected expected + }{ + { + description: "fail when server address is empty", + config: &Config{ + ServerAddress: "", + }, + mode: new(HostMode), + expected: expected{ + agent: nil, + err: ErrNewAgentWithConfigEmptyServerAddress, + }, + }, + { + description: "fail when server address is invalid", + config: &Config{ + ServerAddress: "invalid_url", + }, + mode: new(HostMode), + expected: expected{ + agent: nil, + err: ErrNewAgentWithConfigInvalidServerAddress, + }, + }, + { + description: "fail when tenant is empty", + config: &Config{ + ServerAddress: "http://localhost", + TenantID: "", + }, + mode: new(HostMode), + expected: expected{ + agent: nil, + err: ErrNewAgentWithConfigEmptyTenant, + }, + }, + { + description: "fail when private key is empty", + config: &Config{ + ServerAddress: "http://localhost", + TenantID: "1c462afa-e4b6-41a5-ba54-7236a1770466", + PrivateKey: "", + }, + mode: new(HostMode), + expected: expected{ + agent: nil, + err: ErrNewAgentWithConfigEmptyPrivateKey, + }, + }, + { + description: "fail when mode is nil", + config: &Config{ + ServerAddress: "http://localhost", + TenantID: "1c462afa-e4b6-41a5-ba54-7236a1770466", + PrivateKey: "/tmp/shellhub.key", + }, + mode: nil, + expected: expected{ + agent: nil, + err: ErrNewAgentWithConfigNilMode, + }, + }, + { + description: "success to create agent with config", + config: config, + mode: new(HostMode), + expected: expected{ + agent: &Agent{ + config: config, + mode: new(HostMode), + }, + err: nil, + }, + }, + } + + for _, test := range tests { + t.Run(test.description, func(t *testing.T) { + agent, err := NewAgentWithConfig(test.config, test.mode) + + assert.Equal(t, test.expected.agent, agent) + assert.ErrorIs(t, err, test.expected.err) + }) + } +} + +func 
TestAgent_GetInfo(t *testing.T) { + clientMocks := new(client_mocks.Client) + + AgentVersion = "latest" + + type expected struct { + info *models.Info + err error + } + + agent := &Agent{ + cli: clientMocks, + } + + err := errors.New("") + + tests := []struct { + description string + requiredMocks func() + expected expected + }{ + { + description: "fail to get the server info", + requiredMocks: func() { + clientMocks.On("GetInfo", "latest").Return(nil, err).Once() + }, + expected: expected{ + info: nil, + err: err, + }, + }, + { + description: "success to get the server info", + requiredMocks: func() { + clientMocks.On("GetInfo", "latest").Return(&models.Info{ + Version: "latest", + }, nil).Once() + }, + expected: expected{ + info: &models.Info{ + Version: "latest", + }, + err: nil, + }, + }, + } + + for _, test := range tests { + t.Run(test.description, func(t *testing.T) { + test.requiredMocks() + + info, err := agent.GetInfo() + + assert.Equal(t, test.expected.info, info) + assert.ErrorIs(t, err, test.expected.err) + }) + } +} diff --git a/agent/connector.go b/agent/connector.go new file mode 100644 index 00000000000..f9032d16e48 --- /dev/null +++ b/agent/connector.go @@ -0,0 +1,354 @@ +package main + +import ( + "context" + "fmt" + "strings" + "sync" + "time" + + "github.com/Masterminds/semver" + "github.com/docker/docker/api/types/container" + "github.com/docker/docker/api/types/events" + "github.com/docker/docker/api/types/filters" + dockerclient "github.com/docker/docker/client" + "github.com/shellhub-io/shellhub/agent/pkg/connector" + "github.com/shellhub-io/shellhub/pkg/api/client" + "github.com/shellhub-io/shellhub/pkg/envs" + "github.com/shellhub-io/shellhub/pkg/validator" + log "github.com/sirupsen/logrus" +) + +var _ connector.Connector = new(DockerConnector) + +// DockerConnector is a struct that represents a connector that uses Docker as the container runtime. 
+type DockerConnector struct { + mu sync.Mutex + // server is the ShellHub address of the server that the agent will connect to. + server string + // tenant is the tenant ID of the namespace that the agent belongs to. + tenant string + // cli is the Docker client. + cli *dockerclient.Client + // privateKeys is the path to the directory that contains the private keys for the containers. + privateKeys string + // Label is the label used to identify the containers managed by the ShellHub agent. + Label string + // cancels is a map that contains the cancel functions for each container. + // This is used to stop the agent for a container, marking as done its context and closing the agent. + cancels map[string]context.CancelFunc +} + +// ConfigConnector provides the configuration for the agent connector service. +type ConfigConnector struct { + // Set the ShellHub server address the agent will use to connect. + // This is required. + ServerAddress string `env:"SERVER_ADDRESS,required"` + + // Specify the path to store the devices/containers private keys. + // If not provided, the agent will generate a new one. + // This is required. + PrivateKeys string `env:"PRIVATE_KEYS,required"` + + // Sets the account tenant id used during communication to associate the + // devices to a specific tenant. + // This is required. + TenantID string `env:"TENANT_ID,required"` + + // Determine the interval to send the keep alive message to the server. This + // has a direct impact of the bandwidth used by the device when in idle + // state. Default is 30 seconds. + KeepAliveInterval int `env:"KEEPALIVE_INTERVAL,overwrite,default=30"` + + // Label is the label used to identify the containers managed by the ShellHub agent. 
+ Label string `env:"CONNECTOR_LABEL,default="`
+}
+
+func LoadConfigConnectorFromEnv() (*ConfigConnector, map[string]interface{}, error) {
+ cfg, err := envs.ParseWithPrefix[ConfigConnector]("SHELLHUB_")
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ // TODO: test the environment variables validation on integration tests.
+ if ok, fields, err := validator.New().StructWithFields(cfg); err != nil || !ok {
+ log.WithFields(fields).Error("failed to validate the configuration loaded from envs")
+
+ return nil, fields, err
+ }
+
+ return cfg, nil, nil
+}
+
+func NewDockerConnectorWithClient(cli *dockerclient.Client, config *ConfigConnector) connector.Connector {
+ return &DockerConnector{
+ cli: cli,
+ server: config.ServerAddress,
+ tenant: config.TenantID,
+ privateKeys: config.PrivateKeys,
+ Label: config.Label,
+ cancels: make(map[string]context.CancelFunc),
+ }
+}
+
+// NewDockerConnector creates a new [Connector] that uses Docker as the container runtime.
+func NewDockerConnector(config *ConfigConnector) (connector.Connector, error) {
+ cli, err := dockerclient.NewClientWithOpts(dockerclient.FromEnv, dockerclient.WithAPIVersionNegotiation())
+ if err != nil {
+ return nil, err
+ }
+
+ return &DockerConnector{
+ cli: cli,
+ server: config.ServerAddress,
+ tenant: config.TenantID,
+ privateKeys: config.PrivateKeys,
+ Label: config.Label,
+ cancels: make(map[string]context.CancelFunc),
+ }, nil
+}
+
+// events returns the docker events.
+func (d *DockerConnector) events(ctx context.Context) (<-chan events.Message, <-chan error) { + filters := filters.NewArgs() + if d.Label != "" { + filters.Add("label", d.Label) + } + + return d.cli.Events(ctx, events.ListOptions{ + Filters: filters, + }) +} + +func (d *DockerConnector) List(ctx context.Context) ([]connector.Container, error) { + filters := filters.NewArgs() + if d.Label != "" { + filters.Add("label", d.Label) + } + + containers, err := d.cli.ContainerList(ctx, container.ListOptions{ + Filters: filters, + }) + if err != nil { + return nil, err + } + + list := make([]connector.Container, len(containers)) + for i, container := range containers { + list[i].ID = container.ID + + name, err := d.getContainerNameFromID(ctx, container.ID) + if err != nil { + return nil, err + } + + list[i].Name = name + } + + return list, nil +} + +// Start starts the agent for the container with the given ID. +func (d *DockerConnector) Start(ctx context.Context, id string, name string) { + id = id[:12] + + d.mu.Lock() + ctx, d.cancels[id] = context.WithCancel(ctx) + d.mu.Unlock() + + privateKey := fmt.Sprintf("%s/%s.key", d.privateKeys, id) + go initContainerAgent(ctx, d.cli, connector.Container{ + ID: id, + Name: name, + ServerAddress: d.server, + Tenant: d.tenant, + PrivateKey: privateKey, + Cancel: d.cancels[id], + }) +} + +// Stop stops the agent for the container with the given ID. +func (d *DockerConnector) Stop(_ context.Context, id string) { + id = id[:12] + + d.mu.Lock() + defer d.mu.Unlock() + + cancel, ok := d.cancels[id] + if ok { + cancel() + delete(d.cancels, id) + } +} + +func (d *DockerConnector) getContainerNameFromID(ctx context.Context, id string) (string, error) { + container, err := d.cli.ContainerInspect(ctx, id) + if err != nil { + return "", err + } + + // NOTE: It removes the first character on container's name that is a `/`. 
+ name := container.Name[1:]
+
+ // NOTE: Normalize the container name to comply with ShellHub's device naming conventions.
+ // While Docker allows characters like dots and hyphens in its naming pattern `[a-zA-Z0-9][a-zA-Z0-9_.-]`,
+ // ShellHub restricts names to letters, numbers, underscores, and hyphens, with a maximum length of 64 characters
+ // `([a-zA-Z0-9_-]){1,64}$`. This normalization is essential for compatibility.
+ name = strings.ReplaceAll(name, ".", "_")
+ if len(name) > 64 {
+ name = name[:64]
+ }
+
+ return name, nil
+}
+
+// Listen listens for events and starts or stops the agent for the containers.
+func (d *DockerConnector) Listen(ctx context.Context) error {
+ containers, err := d.List(ctx)
+ if err != nil {
+ return err
+ }
+
+ for _, container := range containers {
+ d.Start(ctx, container.ID, container.Name)
+ }
+
+ events, errs := d.events(ctx)
+ for {
+ select {
+ case <-ctx.Done():
+ return nil
+ case err := <-errs:
+ return err
+ case container := <-events:
+ // NOTE: Docker's "start" and "die" events are emitted every time a container starts or stops,
+ // regardless of how the command was run. For example, if a container was started with `docker run -d`, the
+ // "start" event will be fired, but if the same container was started with `docker start`, the
+ // "start" event will be fired too. The same happens with the "die" event.
+ switch container.Action { + case "start": + name, err := d.getContainerNameFromID(ctx, container.Actor.ID) + if err != nil { + return err + } + + d.Start(ctx, container.Actor.ID, name) + case "die": + d.Stop(ctx, container.Actor.ID) + } + } + } +} + +func (d *DockerConnector) CheckUpdate() (*semver.Version, error) { + api, err := client.NewClient(d.server) + if err != nil { + log.WithError(err).WithFields(log.Fields{ + "version": connector.ConnectorVersion, + }).Error("Failed to create HTTP client to check agent version") + + return nil, err + } + + info, err := api.GetInfo(connector.ConnectorVersion) + if err != nil { + log.WithError(err).WithFields(log.Fields{ + "version": connector.ConnectorVersion, + }).Error("Failed to get info from ShellHub's server") + + return nil, err + } + + return semver.NewVersion(info.Version) +} + +// initContainerAgent initializes the agent for a container. +func initContainerAgent(ctx context.Context, cli *dockerclient.Client, container connector.Container) { + AgentPlatform = "connector" + AgentVersion = connector.ConnectorVersion + + // TODO: Let this configuration build next to the Agent [agent.LoadConfigConnectorFromEnv] function. 
+ cfg := &Config{
+ ServerAddress: container.ServerAddress,
+ TenantID: container.Tenant,
+ PrivateKey: container.PrivateKey,
+ PreferredIdentity: container.ID,
+ PreferredHostname: container.Name,
+ KeepAliveInterval: 30,
+ MaxRetryConnectionTimeout: 60,
+ }
+
+ log.WithFields(log.Fields{
+ "id": container.ID,
+ "identity": cfg.PreferredIdentity,
+ "hostname": cfg.PreferredHostname,
+ "tenant_id": cfg.TenantID,
+ "server_address": cfg.ServerAddress,
+ "timestamp": time.Now(),
+ "version": AgentVersion,
+ }).Info("Connector container started")
+
+ mode, err := NewConnectorMode(cli, container.ID)
+ if err != nil {
+ log.WithError(err).WithFields(log.Fields{
+ "id": container.ID,
+ "identity": cfg.PreferredIdentity,
+ "hostname": cfg.PreferredHostname,
+ "tenant_id": cfg.TenantID,
+ "server_address": cfg.ServerAddress,
+ "timestamp": time.Now(),
+ "version": AgentVersion,
+ }).Fatal("Failed to create connector mode")
+ }
+
+ ag, err := NewAgentWithConfig(cfg, mode)
+ if err != nil {
+ log.WithError(err).WithFields(log.Fields{
+ "id": container.ID,
+ "configuration": cfg,
+ "version": AgentVersion,
+ }).Fatal("Failed to create agent")
+ }
+
+ if err := ag.Initialize(); err != nil {
+ log.WithError(err).WithFields(log.Fields{
+ "id": container.ID,
+ "configuration": cfg,
+ "version": AgentVersion,
+ }).Fatal("Failed to initialize agent")
+ }
+
+ log.WithFields(log.Fields{
+ "id": container.ID,
+ "identity": cfg.PreferredIdentity,
+ "hostname": cfg.PreferredHostname,
+ "tenant_id": cfg.TenantID,
+ "server_address": cfg.ServerAddress,
+ "timestamp": time.Now(),
+ "version": AgentVersion,
+ }).Info("Listening for connections")
+
+ // NOTICE(r): listen for connections and wait for a channel message to close the agent. It will receive
+ // this message when something outside of this goroutine sends a `done`, which will cause the agent to close
+ // and no more connections to be allowed until it is started again.
+ if err := ag.Listen(ctx); err != nil { + log.WithError(err).WithFields(log.Fields{ + "id": container.ID, + "identity": cfg.PreferredIdentity, + "hostname": cfg.PreferredHostname, + "tenant_id": cfg.TenantID, + "server_address": cfg.ServerAddress, + "timestamp": time.Now(), + "version": AgentVersion, + }).Fatal("Failed to listen for connections") + } + + log.WithFields(log.Fields{ + "id": container.ID, + "identity": cfg.PreferredIdentity, + "hostname": cfg.PreferredHostname, + "tenant_id": cfg.TenantID, + "server_address": cfg.ServerAddress, + "version": AgentVersion, + }).Info("Connector container done") +} diff --git a/agent/entrypoint-dev.sh b/agent/entrypoint-dev.sh index 174fcb3ecdb..9d6fbb11be0 100755 --- a/agent/entrypoint-dev.sh +++ b/agent/entrypoint-dev.sh @@ -1,3 +1,17 @@ #!/bin/sh -refresh run +# Cleanup function to kill Delve processes on exit +cleanup() { + echo "Cleaning up Delve processes..." + pkill -9 dlv + exit 0 +} + +# Trap SIGTERM and SIGINT to ensure cleanup +trap cleanup SIGTERM SIGINT + +# Start air in background +air & + +# Wait for air process +wait $! 
diff --git a/agent/go.mod b/agent/go.mod index b2c83c1687b..1240d56edb8 100644 --- a/agent/go.mod +++ b/agent/go.mod @@ -1,57 +1,76 @@ module github.com/shellhub-io/shellhub/agent -go 1.20 +go 1.24.9 require ( + github.com/GehirnInc/crypt v0.0.0-20230320061759-8cc1b52080c5 github.com/Masterminds/semver v1.5.0 + github.com/creack/pty v1.1.24 + github.com/docker/docker v28.5.2+incompatible + github.com/gliderlabs/ssh v0.3.5 + github.com/go-playground/assert/v2 v2.2.0 + github.com/gorilla/websocket v1.5.3 + github.com/labstack/echo/v4 v4.15.0 + github.com/mattn/go-shellwords v1.0.12 + github.com/multiformats/go-multistream v0.6.1 + github.com/openwall/yescrypt-go v1.0.0 + github.com/pkg/errors v0.9.1 + github.com/pkg/sftp v1.13.10 github.com/shellhub-io/shellhub v0.13.4 - github.com/sirupsen/logrus v1.9.3 - github.com/spf13/cobra v1.8.0 -) - -require ( - github.com/labstack/gommon v0.4.0 // indirect - github.com/mattn/go-colorable v0.1.13 // indirect - github.com/mattn/go-isatty v0.0.19 // indirect - github.com/valyala/bytebufferpool v1.0.0 // indirect - github.com/valyala/fasttemplate v1.2.2 // indirect + github.com/sirupsen/logrus v1.9.4 + github.com/spf13/cobra v1.10.2 + github.com/stretchr/testify v1.11.1 + golang.org/x/crypto v0.47.0 + golang.org/x/sys v0.40.0 ) require ( github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 // indirect - github.com/GehirnInc/crypt v0.0.0-20230320061759-8cc1b52080c5 // indirect - github.com/Microsoft/go-winio v0.6.1 // indirect + github.com/Microsoft/go-winio v0.6.2 // indirect github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be // indirect - github.com/creack/pty v1.1.18 // indirect - github.com/docker/distribution v2.8.2+incompatible // indirect - github.com/docker/docker v24.0.7+incompatible // indirect - github.com/docker/go-connections v0.4.0 // indirect + github.com/containerd/errdefs v1.0.0 // indirect + github.com/containerd/errdefs/pkg v0.3.0 // indirect + github.com/davecgh/go-spew v1.1.1 // 
indirect + github.com/distribution/reference v0.6.0 // indirect + github.com/docker/go-connections v0.5.0 // indirect github.com/docker/go-units v0.5.0 // indirect - github.com/gliderlabs/ssh v0.3.5 // indirect + github.com/felixge/httpsnoop v1.0.4 // indirect + github.com/go-logr/logr v1.4.3 // indirect + github.com/go-logr/stdr v1.2.2 // indirect github.com/go-playground/locales v0.14.1 // indirect github.com/go-playground/universal-translator v0.18.1 // indirect github.com/go-playground/validator/v10 v10.11.2 // indirect github.com/go-resty/resty/v2 v2.7.0 // indirect - github.com/gogo/protobuf v1.3.2 // indirect - github.com/golang-jwt/jwt/v4 v4.5.0 // indirect - github.com/gorilla/websocket v1.5.0 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/hashicorp/yamux v0.1.2 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/kr/fs v0.1.0 // indirect - github.com/labstack/echo/v4 v4.10.2 // indirect + github.com/labstack/gommon v0.4.2 // indirect github.com/leodido/go-urn v1.2.2 // indirect - github.com/mattn/go-shellwords v1.0.12 // indirect + github.com/mattn/go-colorable v0.1.14 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/moby/docker-image-spec v1.3.1 // indirect + github.com/moby/sys/atomicwriter v0.1.0 // indirect + github.com/multiformats/go-varint v0.0.6 // indirect github.com/opencontainers/go-digest v1.0.0 // indirect - github.com/opencontainers/image-spec v1.0.2 // indirect - github.com/pkg/errors v0.9.1 // indirect - github.com/pkg/sftp v1.13.5 // indirect + github.com/opencontainers/image-spec v1.1.0 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect github.com/sethvargo/go-envconfig v0.9.0 // indirect - github.com/spf13/pflag v1.0.5 // indirect - golang.org/x/crypto v0.17.0 // indirect - golang.org/x/mod v0.8.0 // indirect - golang.org/x/net v0.17.0 // indirect - golang.org/x/sys v0.15.0 // indirect - golang.org/x/text v0.14.0 // indirect - golang.org/x/tools v0.6.0 
// indirect + github.com/spf13/pflag v1.0.9 // indirect + github.com/stretchr/objx v0.5.2 // indirect + github.com/valyala/bytebufferpool v1.0.0 // indirect + github.com/valyala/fasttemplate v1.2.2 // indirect + go.opentelemetry.io/auto/sdk v1.1.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.51.0 // indirect + go.opentelemetry.io/otel v1.37.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.26.0 // indirect + go.opentelemetry.io/otel/metric v1.37.0 // indirect + go.opentelemetry.io/otel/trace v1.37.0 // indirect + go.opentelemetry.io/proto/otlp v1.2.0 // indirect + golang.org/x/net v0.48.0 // indirect + golang.org/x/text v0.33.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect + gotest.tools/v3 v3.5.1 // indirect ) replace github.com/shellhub-io/shellhub => ../ diff --git a/agent/go.sum b/agent/go.sum index 64c293fb02f..12a61c06782 100644 --- a/agent/go.sum +++ b/agent/go.sum @@ -4,25 +4,41 @@ github.com/GehirnInc/crypt v0.0.0-20230320061759-8cc1b52080c5 h1:IEjq88XO4PuBDcv github.com/GehirnInc/crypt v0.0.0-20230320061759-8cc1b52080c5/go.mod h1:exZ0C/1emQJAw5tHOaUDyY1ycttqBAPcxuzf7QbY6ec= github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww= github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= -github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= -github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= -github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod 
h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= -github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY= -github.com/creack/pty v1.1.18/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4= +github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= +github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= +github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI= +github.com/containerd/errdefs v1.0.0/go.mod h1:+YBYIdtsnF4Iw6nWZhJcqGSg/dwvV7tyJ/kCkyJ2k+M= +github.com/containerd/errdefs/pkg v0.3.0 h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151Xdx3ZPPE= +github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk= +github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= +github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= +github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= +github.com/creack/pty v1.1.24 h1:bJrF4RRfyJnbTJqzRLHzcGaZK1NeM5kTC9jGgovnR1s= +github.com/creack/pty v1.1.24/go.mod h1:08sCNb52WyoAwi2QDyzUCTgcvVFhUzewun7wtTfvcwE= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/docker/distribution v2.8.2+incompatible h1:T3de5rq0dB1j30rp0sA2rER+m322EBzniBPB6ZIzuh8= -github.com/docker/distribution v2.8.2+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= -github.com/docker/docker v24.0.7+incompatible h1:Wo6l37AuwP3JaMnZa226lzVXGA3F9Ig1seQen0cKYlM= -github.com/docker/docker v24.0.7+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= -github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ= -github.com/docker/go-connections v0.4.0/go.mod 
h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= +github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= +github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= +github.com/docker/docker v28.5.2+incompatible h1:DBX0Y0zAjZbSrm1uzOkdr1onVghKaftjlSWt4AFexzM= +github.com/docker/docker v28.5.2+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c= +github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc= github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= @@ -31,141 +47,153 @@ github.com/go-playground/validator/v10 v10.11.2 h1:q3SHpufmypg+erIExEKUmsgmhDTyh 
github.com/go-playground/validator/v10 v10.11.2/go.mod h1:NieE624vt4SCTJtD87arVLvdmjPAeV8BQlHtMnw9D7s= github.com/go-resty/resty/v2 v2.7.0 h1:me+K9p3uhSmXtrBZ4k9jcEAfJmuC8IivWHwaLZwPrFY= github.com/go-resty/resty/v2 v2.7.0/go.mod h1:9PWDzw47qPphMRFfhsyk0NnSgvluHcljSMVIq3w7q0I= -github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= -github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= -github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg= -github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= -github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= -github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= -github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg= +github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.1 h1:/c3QmbOGMGTOumP2iT/rCwB7b0QDGLKzqOmktBjT+Is= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.1/go.mod h1:5SN9VR2LTsRFsrEC6FHgRbTWrTHu6tqPeKxEQv15giM= +github.com/hashicorp/yamux v0.1.2 h1:XtB8kyFOyHXYVFnwT5C3+Bdo8gArse7j2AQ0DA0Uey8= +github.com/hashicorp/yamux v0.1.2/go.mod h1:C+zze2n6e/7wshOZep2A70/aQU6QBRWJO/G6FT1wIns= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/jarcoal/httpmock v1.3.1 
h1:iUx3whfZWVf3jT01hQTO/Eo5sAYtB2/rqaUuOtpInww= -github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= -github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/jarcoal/httpmock v1.3.1/go.mod h1:3yb8rc4BI7TCBhFY8ng0gjuLKJNquuDNiPaZjnENuYg= github.com/kr/fs v0.1.0 h1:Jskdu9ieNAYnjxsi0LbQp1ulIKZV1LAFgK1tWhpZgl8= github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= -github.com/labstack/echo/v4 v4.10.2 h1:n1jAhnq/elIFTHr1EYpiYtyKgx4RW9ccVgkqByZaN2M= -github.com/labstack/echo/v4 v4.10.2/go.mod h1:OEyqf2//K1DFdE57vw2DRgWY0M7s65IVQO2FzvI4J5k= -github.com/labstack/gommon v0.4.0 h1:y7cvthEAEbU0yHOf4axH8ZG2NH8knB9iNSoTO8dyIk8= -github.com/labstack/gommon v0.4.0/go.mod h1:uW6kP17uPlLJsD3ijUYn3/M5bAxtlZhMI6m3MFxTMTM= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/labstack/echo/v4 v4.15.0 h1:hoRTKWcnR5STXZFe9BmYun9AMTNeSbjHi2vtDuADJ24= +github.com/labstack/echo/v4 v4.15.0/go.mod h1:xmw1clThob0BSVRX1CRQkGQ/vjwcpOMjQZSZa9fKA/c= +github.com/labstack/gommon v0.4.2 h1:F8qTUNXgG1+6WQmqoUWnz8WiEU60mXVVw0P4ht1WRA0= +github.com/labstack/gommon v0.4.2/go.mod h1:QlUFxVM+SNXhDL/Z7YhocGIBYOiwB0mXm1+1bAPHPyU= github.com/leodido/go-urn v1.2.2 h1:7z68G0FCGvDk646jz1AelTYNYWrTNm0bEcFAo147wt4= github.com/leodido/go-urn v1.2.2/go.mod h1:kUaIbLZWttglzwNuG0pgsh5vuV6u2YcGBYz1hIPjtOQ= -github.com/mattn/go-colorable v0.1.11/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= -github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= -github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= -github.com/mattn/go-isatty v0.0.14/go.mod 
h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= -github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= -github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= -github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE= +github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-shellwords v1.0.12 h1:M2zGm7EW6UQJvDeQxo4T51eKPurbeFbe8WtebGE2xrk= github.com/mattn/go-shellwords v1.0.12/go.mod h1:EZzvwXDESEeg03EKmM+RmDnNOPKG4lLtQsUlTZDWQ8Y= +github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= +github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= +github.com/moby/sys/atomicwriter v0.1.0 h1:kw5D/EqkBwsBFi0ss9v1VG3wIkVhzGvLklJ+w3A14Sw= +github.com/moby/sys/atomicwriter v0.1.0/go.mod h1:Ul8oqv2ZMNHOceF643P6FKPXeCmYtlQMvpizfsSoaWs= +github.com/moby/sys/sequential v0.6.0 h1:qrx7XFUd/5DxtqcoH1h438hF5TmOvzC/lspjy7zgvCU= +github.com/moby/sys/sequential v0.6.0/go.mod h1:uyv8EUTrca5PnDsdMGXhZe6CCe8U/UiTWd+lL+7b/Ko= github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= +github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= +github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/multiformats/go-multistream v0.6.1 h1:4aoX5v6T+yWmc2raBHsTvzmFhOI8WVOer28DeBBEYdQ= +github.com/multiformats/go-multistream v0.6.1/go.mod h1:ksQf6kqHAb6zIsyw7Zm+gAuVo57Qbq84E27YlYqavqw= +github.com/multiformats/go-varint v0.0.6 
h1:gk85QWKxh3TazbLxED/NlDVv8+q+ReFJk7Y2W/KhfNY= +github.com/multiformats/go-varint v0.0.6/go.mod h1:3Ls8CIEsrijN6+B7PbrXRPxHRPuXSrVKRY101jdMZYE= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= -github.com/opencontainers/image-spec v1.0.2 h1:9yCKha/T5XdGtO0q9Q9a6T5NUCsTn/DrBg0D7ufOcFM= -github.com/opencontainers/image-spec v1.0.2/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= +github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug= +github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM= +github.com/openwall/yescrypt-go v1.0.0 h1:jsGk48zkFvtUjGVOhYPGh+CS595JmTRcKnpggK2AON4= +github.com/openwall/yescrypt-go v1.0.0/go.mod h1:e6CWtFizUEOUttaOjeVMiv1lJaJie3mfOtLJ9CCD6sA= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/sftp v1.13.5 h1:a3RLUqkyjYRtBTZJZ1VRrKbN3zhuPLlUc3sphVz81go= -github.com/pkg/sftp v1.13.5/go.mod h1:wHDZ0IZX6JcBYRK1TH9bcVq8G7TLpVHYIGJRFnmPfxg= +github.com/pkg/sftp v1.13.10 h1:+5FbKNTe5Z9aspU88DPIKJ9z2KZoaGCu6Sr6kKR/5mU= +github.com/pkg/sftp v1.13.10/go.mod h1:bJ1a7uDhrX/4OII+agvy28lzRvQrmIQuaHrcI1HbeGA= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= +github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/rwtodd/Go.Sed v0.0.0-20210816025313-55464686f9ef/go.mod h1:8AEUvGVi2uQ5b24BIhcr0GCcpd/RNAFWaN2CJFrWIIQ= github.com/sethvargo/go-envconfig 
v0.9.0 h1:Q6FQ6hVEeTECULvkJZakq3dZMeBQ3JUpcKMfPQbKMDE= github.com/sethvargo/go-envconfig v0.9.0/go.mod h1:Iz1Gy1Sf3T64TQlJSvee81qDhf7YIlt8GMUX6yyNFs0= github.com/shellhub-io/ssh v0.0.0-20230224143412-edd48dfd6eea h1:7tEI9nukSYZViCjdVMYyCM67hh4mcKfEEbraWmd9xGQ= github.com/shellhub-io/ssh v0.0.0-20230224143412-edd48dfd6eea/go.mod h1:8XB4KraRrX39qHhT6yxPsHedjA08I/uBVwj4xC+/+z4= -github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= -github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= -github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0= -github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho= -github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= -github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/sirupsen/logrus v1.9.4 h1:TsZE7l11zFCLZnZ+teH4Umoq5BhEIfIzfRDZ1Uzql2w= +github.com/sirupsen/logrus v1.9.4/go.mod h1:ftWc9WdOfJ0a92nsE2jF5u5ZwH8Bv2zdeOC42RjbV2g= +github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU= +github.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4= +github.com/spf13/pflag v1.0.9 h1:9exaQaMOCwffKiiiYk6/BndUBv+iRViNW+4lEMi0PvY= +github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= -github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= -github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= 
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= -github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8= github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= -github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= github.com/valyala/fasttemplate v1.2.2 h1:lxLXG0uE3Qnshl9QyaK6XJxMXlQZELvChBOCmQD0Loo= github.com/valyala/fasttemplate v1.2.2/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= -github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= +go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.51.0 h1:Xs2Ncz0gNihqu9iosIZ5SkBbWo5T8JhhLJFMQL1qmLI= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.51.0/go.mod 
h1:vy+2G/6NvVMpwGX/NyLqcC41fxepnuKHk16E6IZUcJc= +go.opentelemetry.io/otel v1.37.0 h1:9zhNfelUvx0KBfu/gb+ZgeAfAgtWrfHJZcAqFC228wQ= +go.opentelemetry.io/otel v1.37.0/go.mod h1:ehE/umFRLnuLa/vSccNq9oS1ErUlkkK71gMcN34UG8I= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.26.0 h1:1u/AyyOqAWzy+SkPxDpahCNZParHV8Vid1RnI2clyDE= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.26.0/go.mod h1:z46paqbJ9l7c9fIPCXTqTGwhQZ5XoTIsfeFYWboizjs= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.26.0 h1:1wp/gyxsuYtuE/JFxsQRtcCDtMrO2qMvlfXALU5wkzI= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.26.0/go.mod h1:gbTHmghkGgqxMomVQQMur1Nba4M0MQ8AYThXDUjsJ38= +go.opentelemetry.io/otel/metric v1.37.0 h1:mvwbQS5m0tbmqML4NqK+e3aDiO02vsf/WgbsdpcPoZE= +go.opentelemetry.io/otel/metric v1.37.0/go.mod h1:04wGrZurHYKOc+RKeye86GwKiTb9FKm1WHtO+4EVr2E= +go.opentelemetry.io/otel/sdk v1.26.0 h1:Y7bumHf5tAiDlRYFmGqetNcLaVUZmh4iYfmGxtmz7F8= +go.opentelemetry.io/otel/sdk v1.26.0/go.mod h1:0p8MXpqLeJ0pzcszQQN4F0S5FVjBLgypeGSngLsmirs= +go.opentelemetry.io/otel/trace v1.37.0 h1:HLdcFNbRQBE2imdSEgm/kwqmQj1Or1l/7bW6mxVK7z4= +go.opentelemetry.io/otel/trace v1.37.0/go.mod h1:TlgrlQ+PtQO5XFerSPUYG0JSgGyryXewPGyayAWSBS0= +go.opentelemetry.io/proto/otlp v1.2.0 h1:pVeZGk7nXDC9O2hncA6nHldxEjm6LByfA2aN8IOkz94= +go.opentelemetry.io/proto/otlp v1.2.0/go.mod h1:gGpR8txAl5M03pDhMC79G6SdqNV26naRm/KDsgaHD8A= +go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= golang.org/x/crypto v0.0.0-20220826181053-bd7e27e6170d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= -golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= -golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.8.0 
h1:LUYupSeNrTNCGzR/hVBk2NHZO4hXcVaW1k4Qx7rjPx8= -golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8= +golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A= golang.org/x/net v0.0.0-20211029224645-99673261e6eb/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220826154423-83b083e8dc8b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= -golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= -golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= -golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o= -golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/net v0.48.0 h1:zyQRTTrjc33Lhh0fBgT/H3oZq9WuvRR5gPC70xpDiQU= 
+golang.org/x/net v0.48.0/go.mod h1:+ndRgGjkh8FGtu1w1FGbEC31if4VrNVMuKTgcAAnQRY= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211103235746-7861aae1554b/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220825204002-c680a09ffe64/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc= -golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ= +golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term 
v0.0.0-20220722155259-a9ba230a4035/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.15.0 h1:y/Oo/a/q3IXu26lQgl04j/gjuBDOBlx7X6Om1j2CPW4= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/term v0.39.0 h1:RclSuaJf32jOqZz74CkPA9qFuVTX7vhLlpfj/IGWlqY= +golang.org/x/term v0.39.0/go.mod h1:yxzUCTP/U+FzoxfdKmLaA0RV1WgE0VY7hXBwKtY/4ww= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= -golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4= +golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= +golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= +golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI= +golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.6.0 h1:BOw41kyTf3PuCW1pVQf8+Cyg8pMlkYB1oo9iJ6D/lKM= -golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= -golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod 
h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/genproto/googleapis/api v0.0.0-20240227224415-6ceb2ff114de h1:jFNzHPIeuzhdRwVhbZdiym9q0ory/xY3sA+v2wPg8I0= +google.golang.org/genproto/googleapis/api v0.0.0-20240227224415-6ceb2ff114de/go.mod h1:5iCWqnniDlqZHrd3neWVTOwvh/v6s3232omMecelax8= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 h1:pPJltXNxVzT4pK9yD8vR9X75DaWYYmLGMsEvBfFQZzQ= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1/go.mod h1:UqMtugtsSgubUsoxbuAoiCXvqvErP7Gf0so0mK9tHxU= +google.golang.org/grpc v1.67.0 h1:IdH9y6PF5MPSdAntIcpjQ+tXO41pcQsfZV2RxtQgVcw= +google.golang.org/grpc v1.67.0/go.mod h1:1gLDyUQU7CTLJI90u3nXZ9ekeghjeM7pTDZlqFNg2AA= +google.golang.org/protobuf v1.35.2 h1:8Ar7bF+apOIoThw1EdZl0p1oWvMqTHmpA2fRTyZO8io= +google.golang.org/protobuf v1.35.2/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gotest.tools/v3 v3.5.0 h1:Ljk6PdHdOhAb5aDMWXjDLMMhph+BpztA4v1QdqEW2eY= +gotest.tools/v3 v3.5.1 h1:EENdUnS3pdur5nybKYIh2Vfgc8IUNBjxDPSjtiJcOzU= +gotest.tools/v3 v3.5.1/go.mod 
h1:isy3WKz7GK6uNw/sbHzfKBLvlvXwUyV06n6brMxxopU= diff --git a/agent/handlers.go b/agent/handlers.go new file mode 100644 index 00000000000..1f54cff350b --- /dev/null +++ b/agent/handlers.go @@ -0,0 +1,408 @@ +package main + +import ( + "context" + "fmt" + "io" + "net" + "net/http" + "net/netip" + "sync" + + dockerclient "github.com/docker/docker/client" + "github.com/labstack/echo/v4" + "github.com/pkg/errors" + "github.com/shellhub-io/shellhub/agent/pkg/tunnel" + log "github.com/sirupsen/logrus" +) + +const ( + // HandleSSHOpenV2 is the protocol used to open a new SSH connection. + HandleSSHOpenV2 = "/ssh/open/1.0.0" + // HandleSSHCloseV2 is the protocol used to close an existing SSH connection. + HandleSSHCloseV2 = "/ssh/close/1.0.0" + // HandleHTTPProxyV2 is the protocol used to open a new HTTP proxy connection. + HandleHTTPProxyV2 = "/http/proxy/1.0.0" +) + +// httpProxyHandlerV2 handlers proxy connections to the required address. +func httpProxyHandlerV2(agent *Agent) tunnel.HandlerFunc { + const ProxyHandlerNetwork = "tcp" + + return func(ctx tunnel.Context, rwc io.ReadWriteCloser) error { + headers, err := ctx.Headers() + if err != nil { + log.WithError(err).Error("failed to get the headers from the connection") + + return err + } + + id := headers["id"] + host := headers["host"] + port := headers["port"] + + logger := log.WithFields(log.Fields{ + "id": id, + "host": host, + "port": port, + }) + + if _, ok := agent.mode.(*ConnectorMode); ok { + cli, err := dockerclient.NewClientWithOpts(dockerclient.FromEnv, dockerclient.WithAPIVersionNegotiation()) + if err != nil { + log.WithError(err).Error("failed to create the Docker client") + + return ctx.Error(errors.New("failed to connect to the Docker Engine")) + } + + container, err := cli.ContainerInspect(context.Background(), agent.server.ContainerID) + if err != nil { + log.WithError(err).Error("failed to inspect the container") + + return ctx.Error(errors.New("failed to inspect the container")) + } + + var 
target string + + addr, err := netip.ParseAddr(host) + if err != nil { + log.WithError(err).Error("failed to parse the address on proxy") + + return ctx.Error(errors.New("failed to parse the address on proxy")) + } + + if addr.IsLoopback() { + log.Trace("host is a loopback address, using the container IP address") + + for _, network := range container.NetworkSettings.Networks { + target = network.IPAddress + + break + } + } else { + for _, network := range container.NetworkSettings.Networks { + subnet, err := netip.ParsePrefix(fmt.Sprintf("%s/%d", network.Gateway, network.IPPrefixLen)) + if err != nil { + logger.WithError(err).Error("failed to parse the gateway on proxy") + + continue + } + + ip, err := netip.ParseAddr(host) + if err != nil { + logger.WithError(err).Error("failed to parse the address on proxy") + + continue + } + + if subnet.Contains(ip) { + target = ip.String() + + break + } + } + } + + if target == "" { + return ctx.Error(errors.New("address not found on the device")) + } + + host = target + } + + ErrFailedDialToAddressAndPort := errors.New("failed to dial to the address and port") + + logger.Trace("proxy handler connecting to the address") + + in, err := net.Dial(ProxyHandlerNetwork, net.JoinHostPort(host, port)) + if err != nil { + logger.WithError(err).Error("proxy handler failed to dial to the address") + + return ctx.Error(ErrFailedDialToAddressAndPort) + } + + defer in.Close() + + logger.Trace("proxy handler dialed to the address") + + // TODO: Add consts for status values. 
+ if err := ctx.Status("ok"); err != nil { + logger.WithError(err).Error("proxy handler failed to send status response") + + return err + } + + wg := new(sync.WaitGroup) + done := sync.OnceFunc(func() { + defer in.Close() + defer rwc.Close() + + logger.Trace("close called on in and out connections") + }) + + wg.Add(1) + go func() { + defer done() + defer wg.Done() + + if _, err := io.Copy(in, rwc); err != nil && err != io.EOF { + logger.WithError(err).Error("proxy handler copy from rwc to in failed") + } + }() + + wg.Add(1) + go func() { + defer done() + defer wg.Done() + + if _, err := io.Copy(rwc, in); err != nil && err != io.EOF { + logger.WithError(err).Error("proxy handler copy from in to rwc failed") + } + }() + + logger.WithError(err).Info("proxy handler waiting for data pipe") + + wg.Wait() + + logger.WithError(err).Info("proxy handler done") + + return nil + } +} + +func sshHandlerV2(agent *Agent) tunnel.HandlerFunc { + return func(ctx tunnel.Context, rwc io.ReadWriteCloser) error { + defer rwc.Close() + + headers, err := ctx.Headers() + if err != nil { + log.WithError(err).Error("failed to get the headers from the connection") + + return err + } + + id := headers["id"] + + conn, ok := rwc.(net.Conn) + if !ok { + log.Error("failed to cast the ReadWriteCloser to net.Conn") + + return errors.New("failed to cast the ReadWriteCloser to net.Conn") + } + + agent.server.Sessions.Store(id, conn) + agent.server.HandleConn(conn) + + return nil + } +} + +func sshCloseHandlerV2(agent *Agent) tunnel.HandlerFunc { + return func(ctx tunnel.Context, rwc io.ReadWriteCloser) error { + defer rwc.Close() + + headers, err := ctx.Headers() + if err != nil { + log.WithError(err).Error("failed to get the headers from the connection") + + return err + } + + id := headers["id"] + + agent.server.CloseSession(id) + + log.WithFields( + log.Fields{ + "id": id, + "version": AgentVersion, + "tenant_id": agent.authData.Namespace, + "server_address": agent.config.ServerAddress, + }, + 
).Info("A tunnel connection was closed") + + return nil + } +} + +const ( + HandleSSHOpenV1 = "GET:///ssh/:id" + HandleSSHCloseV1 = "GET:///ssh/close/:id" + HandleHTTPProxyV1 = "CONNECT:///http/proxy/:addr" +) + +func httpProxyHandlerV1(agent *Agent) func(c echo.Context) error { + const ProxyHandlerNetwork = "tcp" + + return func(c echo.Context) error { + logger := log.WithFields(log.Fields{ + "remote": c.Request().RemoteAddr, + "namespace": c.Request().Header.Get("X-Namespace"), + "path": c.Request().Header.Get("X-Path"), + "version": AgentVersion, + }) + + errorResponse := func(err error, msg string, code int) error { + logger.WithError(err).Debug(msg) + + return c.String(code, msg) + } + + host, port, err := net.SplitHostPort(c.Param("addr")) + if err != nil { + return errorResponse(err, "failed because address is invalid", http.StatusInternalServerError) + } + + if _, ok := agent.mode.(*ConnectorMode); ok { + cli, err := dockerclient.NewClientWithOpts(dockerclient.FromEnv, dockerclient.WithAPIVersionNegotiation()) + if err != nil { + return errorResponse(err, "failed to connect to the Docker Engine", http.StatusInternalServerError) + } + + container, err := cli.ContainerInspect(context.Background(), agent.server.ContainerID) + if err != nil { + return errorResponse(err, "failed to inspect the container", http.StatusInternalServerError) + } + + var target string + + addr, err := netip.ParseAddr(host) + if err != nil { + return errorResponse(err, "failed to parse the for lookback checkage", http.StatusInternalServerError) + } + + if addr.IsLoopback() { + for _, network := range container.NetworkSettings.Networks { + target = network.IPAddress + + break + } + } else { + for _, network := range container.NetworkSettings.Networks { + subnet, err := netip.ParsePrefix(fmt.Sprintf("%s/%d", network.Gateway, network.IPPrefixLen)) + if err != nil { + logger.WithError(err).Trace("Failed to parse the gateway on proxy") + + continue + } + + ip, err := netip.ParseAddr(host) + 
if err != nil { + logger.WithError(err).Trace("Failed to parse the address on proxy") + + continue + } + + if subnet.Contains(ip) { + target = ip.String() + + break + } + } + } + + if target == "" { + return errorResponse(nil, "address not found on the device", http.StatusInternalServerError) + } + + host = target + } + + // NOTE: Gets the to address to connect to. This address can be just a port, :8080, or the host and port, + // localhost:8080. + addr := fmt.Sprintf("%s:%s", host, port) + + in, err := net.Dial(ProxyHandlerNetwork, addr) + if err != nil { + return errorResponse(err, "failed to connect to the server on device", http.StatusInternalServerError) + } + + defer in.Close() + + // NOTE: Inform to the connection that the dial was successfully. + if err := c.NoContent(http.StatusOK); err != nil { + return errorResponse(err, "failed to send the ok status code back to server", http.StatusInternalServerError) + } + + // NOTE: Hijacks the connection to control the data transferred to the client connected. This way, we don't + // depend upon anything externally, only the data. 
+ out, _, err := c.Response().Hijack() + if err != nil { + return errorResponse(err, "failed to hijack connection", http.StatusInternalServerError) + } + + defer out.Close() // nolint:errcheck + + wg := new(sync.WaitGroup) + done := sync.OnceFunc(func() { + defer in.Close() + defer out.Close() + + logger.Trace("close called on in and out connections") + }) + + wg.Add(1) + go func() { + defer done() + defer wg.Done() + + io.Copy(in, out) //nolint:errcheck + }() + + wg.Add(1) + go func() { + defer done() + defer wg.Done() + + io.Copy(out, in) //nolint:errcheck + }() + + logger.WithError(err).Trace("proxy handler waiting for data pipe") + wg.Wait() + + logger.WithError(err).Trace("proxy handler done") + + return nil + } +} + +func sshHandlerV1(ag *Agent) func(c echo.Context) error { + return func(c echo.Context) error { + hj, ok := c.Response().Writer.(http.Hijacker) + if !ok { + return c.String(http.StatusInternalServerError, "webserver doesn't support hijacking") + } + + conn, _, err := hj.Hijack() + if err != nil { + return c.String(http.StatusInternalServerError, "failed to hijack connection") + } + + id := c.Param("id") + httpConn := c.Request().Context().Value("http-conn").(net.Conn) + ag.server.Sessions.Store(id, httpConn) + ag.server.HandleConn(httpConn) + + conn.Close() + + return nil + } +} + +func sshCloseHandlerV1(a *Agent) func(c echo.Context) error { + return func(c echo.Context) error { + id := c.Param("id") + a.server.CloseSession(id) + + log.WithFields( + log.Fields{ + "id": id, + "version": AgentVersion, + "tenant_id": a.authData.Namespace, + "server_address": a.config.ServerAddress, + }, + ).Info("A tunnel connection was closed") + + return nil + } +} diff --git a/agent/init_docker.go b/agent/init_docker.go index c78636f324f..65a7fe2e138 100644 --- a/agent/init_docker.go +++ b/agent/init_docker.go @@ -6,10 +6,22 @@ package main import ( "os" - "github.com/shellhub-io/shellhub/pkg/agent/pkg/osauth" - 
"github.com/shellhub-io/shellhub/pkg/agent/pkg/sysinfo" + "github.com/shellhub-io/shellhub/agent/pkg/sysinfo" ) +// AgentVersion store the version to be embed inside the binary. This is +// injected using `-ldflags` build option. +// +// go build -ldflags "-X main.AgentVersion=1.2.3" +// +// If set to `latest`, the auto-updating mechanism is disabled. This is intended +// to be used during development only. +var AgentVersion string + +// AgentPlatform stores what platform the agent is running on. This is injected in build time in the [ShellHub Agent] +// implementation. +// +// [ShellHub Agent]: https://github.com/shellhub-io/shellhub/tree/master/agent var AgentPlatform string func init() { @@ -19,6 +31,5 @@ func init() { AgentPlatform = "docker" } - osauth.DefaultShadowFilename = "/host/etc/shadow" sysinfo.DefaultOSReleaseFilename = "/host/etc/os-release" } diff --git a/agent/init_native.go b/agent/init_native.go index dc0efcc3564..f407d0e6de4 100644 --- a/agent/init_native.go +++ b/agent/init_native.go @@ -3,4 +3,21 @@ package main -var AgentPlatform = "native" +// AgentVersion store the version to be embed inside the binary. This is +// injected using `-ldflags` build option. +// +// go build -ldflags "-X main.AgentVersion=1.2.3" +// +// If set to `latest`, the auto-updating mechanism is disabled. This is intended +// to be used during development only. +var AgentVersion string + +// AgentPlatform stores what platform the agent is running on. This is injected in build time in the [ShellHub Agent] +// implementation. 
+// +// [ShellHub Agent]: https://github.com/shellhub-io/shellhub/tree/master/agent +var AgentPlatform string + +func init() { + AgentPlatform = "native" +} diff --git a/agent/main.go b/agent/main.go index 90a870cccec..f47107332a7 100644 --- a/agent/main.go +++ b/agent/main.go @@ -4,56 +4,30 @@ import ( "encoding/json" "fmt" "os" + "path" "runtime" "time" "github.com/Masterminds/semver" - "github.com/shellhub-io/shellhub/pkg/agent" - "github.com/shellhub-io/shellhub/pkg/agent/pkg/selfupdater" + "github.com/shellhub-io/shellhub/agent/pkg/connector" + "github.com/shellhub-io/shellhub/agent/pkg/selfupdater" + "github.com/shellhub-io/shellhub/agent/server/modes/host/command" "github.com/shellhub-io/shellhub/pkg/envs" "github.com/shellhub-io/shellhub/pkg/loglevel" log "github.com/sirupsen/logrus" "github.com/spf13/cobra" ) -// AgentVersion store the version to be embed inside the binary. This is -// injected using `-ldflags` build option (e.g: `go build -ldflags "-X -// main.AgentVersion=1.2.3"`). -// -// If set to `latest`, the auto-updating mechanism is disabled. This is intended -// to be used during development only. -var AgentVersion string - func main() { // Default command. rootCmd := &cobra.Command{ // nolint: exhaustruct Use: "agent", - Run: func(cmd *cobra.Command, args []string) { + Run: func(cmd *cobra.Command, _ []string) { loglevel.SetLogLevel() - // NOTE(r): When T, the generic parameter, is a structure with required tag, the fallback for an - // "unprefixed" parameter is used. - // - // For example, - // - // For the structure below, the parser will parse successfully when the variables exist with or without the - // prefixes since the "required" tag is set to true. 
- // - // SHELLHUB_TENANT_ID=00000000-0000-4000-0000-000000000000 SERVER_ADDRESS=http://127.0.0.1 - // PRIVATE_KEY=/tmp/shellhub sudo -E ./agent - // - // struct { - // ServerAddress string `env:"SERVER_ADDRESS,required"` - // PrivateKey string `env:"PRIVATE_KEY,required"` - // TenantID string `env:"TENANT_ID,required` - // } - // - // This behavior is driven by the [envconfig] package. Check it out for more information. - // - // [envconfig]: https://github.com/sethvargo/go-envconfig - cfg, err := envs.ParseWithPrefix[agent.Config]("SHELLHUB_") + cfg, fields, err := LoadConfigFromEnv() if err != nil { - log.Fatal(err) + log.WithError(err).WithFields(fields).Fatal("Failed to load de configuration from the environmental variables") } if os.Geteuid() == 0 && cfg.SingleUserPassword != "" { @@ -102,7 +76,7 @@ func main() { "mode": mode, }).Info("Starting ShellHub") - ag, err := agent.NewAgentWithConfig(cfg, new(agent.HostMode)) + ag, err := NewAgentWithConfig(cfg, new(HostMode)) if err != nil { log.WithError(err).WithFields(log.Fields{ "version": AgentVersion, @@ -119,30 +93,6 @@ func main() { ctx := cmd.Context() - go func() { - // NOTICE: We only start to ping the server when the agent is ready to accept connections. - // It will make the agent ping to server after the ticker time set on ping function, what is 10 minutes - // by default. 
- - if err := ag.Ping(ctx, 0); err != nil { - log.WithError(err).WithFields(log.Fields{ - "version": AgentVersion, - "mode": mode, - "tenant_id": cfg.TenantID, - "server_address": cfg.ServerAddress, - "preferred_hostname": cfg.PreferredHostname, - }).Fatal("Failed to ping server") - } - - log.WithFields(log.Fields{ - "version": AgentVersion, - "mode": mode, - "tenant_id": cfg.TenantID, - "server_address": cfg.ServerAddress, - "preferred_hostname": cfg.PreferredHostname, - }).Info("Stopped pinging server") - }() - log.WithFields(log.Fields{ "version": AgentVersion, "mode": mode, @@ -223,26 +173,111 @@ func main() { }, } + rootCmd.AddCommand(&cobra.Command{ + Use: "connector", + Short: "Starts the ShellHub Agent in Connector mode", + Run: func(cmd *cobra.Command, _ []string) { + updater, err := selfupdater.NewUpdater(AgentVersion) + if err != nil { + log.Panic(err) + } + + err = updater.CompleteUpdate() + if err != nil { + log.Warning(err) + os.Exit(0) + } + + currentVersion := new(semver.Version) + + if AgentVersion != "latest" { + currentVersion, err = updater.CurrentVersion() + if err != nil { + log.Panic(err) + } + } + + cfg, fields, err := LoadConfigConnectorFromEnv() + if err != nil { + log.WithError(err). + WithFields(fields). 
+ Fatal("Failed to load de configuration from the environmental variables") + } + + logger := log.WithFields( + log.Fields{ + "address": cfg.ServerAddress, + "tenant_id": cfg.TenantID, + "private_keys": cfg.PrivateKeys, + "version": AgentVersion, + }, + ) + + cfg.PrivateKeys = path.Dir(cfg.PrivateKeys) + + logger.Info("Starting ShellHub Agent Connector") + + connector.ConnectorVersion = AgentVersion + connector, err := NewDockerConnector(cfg) + if err != nil { + logger.Fatal("Failed to create ShellHub Agent Connector") + } + + if AgentVersion != "latest" { + go func() { + for { + nextVersion, err := connector.CheckUpdate() + if err != nil { + log.WithError(err).WithFields(log.Fields{ + "version": AgentVersion, + }).Error("Failed to check update") + + goto sleep + } + + if nextVersion.GreaterThan(currentVersion) { + if err := updater.ApplyUpdate(nextVersion); err != nil { + log.WithError(err).WithFields(log.Fields{ + "version": AgentVersion, + }).Error("Failed to apply update") + } + + log.WithFields(log.Fields{ + "version": currentVersion, + "next_version": nextVersion.String(), + }).Info("Update successfully applied") + } + + sleep: + log.WithFields(log.Fields{ + "version": AgentVersion, + }).Info("Sleeping for 24 hours") + + time.Sleep(time.Hour * 24) + } + }() + } + + if err := connector.Listen(cmd.Context()); err != nil { + logger.Fatal("Failed to listen for connections") + } + + logger.Info("ShellHub Agent Connector stopped") + }, + }) + rootCmd.AddCommand(&cobra.Command{ // nolint: exhaustruct Use: "info", Short: "Show information about the agent", - Run: func(cmd *cobra.Command, args []string) { + Run: func(cmd *cobra.Command, _ []string) { loglevel.SetLogLevel() - cfg, err := envs.ParseWithPrefix[agent.Config]("SHELLHUB_") + cfg, err := envs.ParseWithPrefix[Config]("SHELLHUB_") if err != nil { log.Fatal(err) } - ag, err := agent.NewAgentWithConfig(cfg, new(agent.HostMode)) - if err != nil { - log.WithError(err).WithFields(log.Fields{ - "version": 
AgentVersion, - "configuration": cfg, - }).Fatal("Failed to create agent") - } - - info, err := ag.GetInfo() + info, err := GetInfo(cfg) if err != nil { log.WithError(err).WithFields(log.Fields{ "version": AgentVersion, @@ -276,8 +311,8 @@ func main() { Short: "Starts the SFTP server", Long: `Starts the SFTP server. This command is used internally by the agent and should not be used directly. It is initialized by the agent when a new SFTP session is created.`, - Run: func(cmd *cobra.Command, args []string) { - agent.NewSFTPServer() + Run: func(_ *cobra.Command, args []string) { + NewSFTPServer(command.SFTPServerMode(args[0])) }, }) @@ -287,8 +322,5 @@ It is initialized by the agent when a new SFTP session is created.`, runtime.Version(), )) - agent.AgentVersion = AgentVersion - agent.AgentPlatform = AgentPlatform - rootCmd.Execute() // nolint: errcheck } diff --git a/agent/modes.go b/agent/modes.go new file mode 100644 index 00000000000..5f49c6ff54c --- /dev/null +++ b/agent/modes.go @@ -0,0 +1,120 @@ +package main + +import ( + "context" + "os/exec" + + dockerclient "github.com/docker/docker/client" + "github.com/shellhub-io/shellhub/agent/pkg/sysinfo" + "github.com/shellhub-io/shellhub/agent/server" + "github.com/shellhub-io/shellhub/agent/server/modes/connector" + "github.com/shellhub-io/shellhub/agent/server/modes/host" +) + +type Info struct { + ID string + Name string +} + +// Mode is the Agent execution mode. +// +// Check [HostMode] and [ConnectorMode] for more information. +type Mode interface { + // Serve prepares the Agent for listening, setting up the SSH server, its modes and values on Agent's. + Serve(agent *Agent) + // GetInfo gets information about Agent according to Agent's mode. + // + // When Agent is running on [HostMode], the info got is from the system where the Agent is running, but when running + // in [ConnectorMode], the data is retrieved from Docker Engine. 
+ GetInfo() (*Info, error) +} + +// HostMode is the Agent execution mode for `Host`. +// +// The host mode is the default mode one, and turns the host machine into a ShellHub's Agent. The host is +// responsible for the SSH server, authentication and authorization, `/etc/passwd`, `/etc/shadow`, and etc. +type HostMode struct{} + +var _ Mode = new(HostMode) + +func (m *HostMode) Serve(agent *Agent) { + agent.server = server.NewServer( + agent.cli, + &host.Mode{ + Authenticator: *host.NewAuthenticator(agent.cli, agent.authData, agent.config.SingleUserPassword, &agent.authData.Name), + Sessioner: *host.NewSessioner(&agent.authData.Name, make(map[string]*exec.Cmd)), + }, + &server.Config{ + PrivateKey: agent.config.PrivateKey, + KeepAliveInterval: agent.config.KeepAliveInterval, + Features: server.LocalPortForwardFeature, + }, + ) + + agent.server.SetDeviceName(agent.authData.Name) +} + +func (m *HostMode) GetInfo() (*Info, error) { + osrelease, err := sysinfo.GetOSRelease() + if err != nil { + return nil, err + } + + return &Info{ + ID: osrelease.ID, + Name: osrelease.Name, + }, nil +} + +// ConnectorMode is the Agent execution mode for `Connector`. +// +// The `Connector` mode is used to turn a container inside a host into a single device ShellHub's Agent. The host is +// responsible for the SSH server, but the authentication and authorization is made by either the conainer +// internals, `passwd` or `shadow`, or by the ShellHub API. +type ConnectorMode struct { + cli *dockerclient.Client + identity string +} + +func NewConnectorMode(cli *dockerclient.Client, identity string) (Mode, error) { + return &ConnectorMode{ + cli: cli, + identity: identity, + }, nil +} + +var _ Mode = new(ConnectorMode) + +func (m *ConnectorMode) Serve(agent *Agent) { + // NOTE: When the agent is running in `Connector` mode, we need to identify the container ID to maintain the + // communication between the server and the agent when the container name on the host changes. 
This information is + // saved inside the device's identity, avoiding significant changes in the current state of the agent. + // TODO: Evaluate if we can use another field than "MAC" to store the container ID. + agent.server = server.NewServer( + agent.cli, + &connector.Mode{ + Authenticator: *connector.NewAuthenticator(agent.cli, m.cli, agent.authData, &agent.Identity.MAC), + Sessioner: *connector.NewSessioner(&agent.Identity.MAC, m.cli), + }, + &server.Config{ + PrivateKey: agent.config.PrivateKey, + KeepAliveInterval: agent.config.KeepAliveInterval, + Features: server.NoFeature, + }, + ) + + agent.server.SetContainerID(agent.Identity.MAC) + agent.server.SetDeviceName(agent.authData.Name) +} + +func (m *ConnectorMode) GetInfo() (*Info, error) { + info, err := m.cli.ContainerInspect(context.Background(), m.identity) + if err != nil { + return nil, err + } + + return &Info{ + ID: "docker", + Name: info.Config.Image, + }, nil +} diff --git a/agent/packaging/config.json b/agent/packaging/config.json index 77755aef681..5ea895dd9fa 100644 --- a/agent/packaging/config.json +++ b/agent/packaging/config.json @@ -13,7 +13,10 @@ "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin", "SHELLHUB_SERVER_ADDRESS=__SERVER_ADDRESS__", "SHELLHUB_TENANT_ID=__TENANT_ID__", - "SHELLHUB_PRIVATE_KEY=/host/etc/shellhub.key" + "SHELLHUB_PRIVATE_KEY=/host/etc/shellhub.key", + "__PREFERRED_HOSTNAME__", + "__PREFERRED_IDENTITY__", + "__KEEPALIVE_INTERVAL__" ], "cwd": "/", "capabilities": { diff --git a/agent/pkg/connector/connector.go b/agent/pkg/connector/connector.go new file mode 100644 index 00000000000..80708999134 --- /dev/null +++ b/agent/pkg/connector/connector.go @@ -0,0 +1,42 @@ +package connector + +import ( + "context" + + "github.com/Masterminds/semver" +) + +// ConnectorVersion stores the version of the ShellHub Instane that is running the connector. +// It is used in the ShellHub Agents initialized by the connector when a container is started. 
+var ConnectorVersion string + +// Container is a struct that represents a container that will be managed by the connector. +type Container struct { + // ID is the container ID. + ID string + // Name is the container name. + Name string + // ServerAddress is the ShellHub address of the server that the agent will connect to. + ServerAddress string + // Tenant is the tenant ID of the namespace that the agent belongs to. + Tenant string + // PrivateKey is the private key of the device. Specify the path to store the container private key. If not + // provided, the agent will generate a new one. This is required. + PrivateKey string + // Cancel is a function that is used to stop the goroutine that is running the agent for this container. + Cancel context.CancelFunc +} + +// Connector is an interface that defines the methods that a connector must implement. +type Connector interface { + // List lists all containers running on the host. + List(ctx context.Context) ([]Container, error) + // Start starts the agent for the container with the given ID. + Start(ctx context.Context, id string, name string) + // Stop stops the agent for the container with the given ID. + Stop(ctx context.Context, id string) + // Listen listens for events and starts or stops the agent for the container that was created or removed. + Listen(ctx context.Context) error + // Checks for Connector's update. 
+ CheckUpdate() (*semver.Version, error) +} diff --git a/pkg/agent/pkg/keygen/keygen.go b/agent/pkg/keygen/keygen.go similarity index 100% rename from pkg/agent/pkg/keygen/keygen.go rename to agent/pkg/keygen/keygen.go diff --git a/agent/pkg/osauth/auth.go b/agent/pkg/osauth/auth.go new file mode 100644 index 00000000000..1a2f4522c94 --- /dev/null +++ b/agent/pkg/osauth/auth.go @@ -0,0 +1,453 @@ +//go:build !freebsd +// +build !freebsd + +package osauth + +import ( + "bufio" + "fmt" + "io" + "os" + "os/user" + "slices" + "strconv" + "strings" + + "github.com/GehirnInc/crypt" + _ "github.com/GehirnInc/crypt/md5_crypt" // GehirnInc/crypt uses blank imports for crypto subpackages + _ "github.com/GehirnInc/crypt/sha256_crypt" // GehirnInc/crypt uses blank imports for crypto subpackages + _ "github.com/GehirnInc/crypt/sha512_crypt" // GehirnInc/crypt uses blank imports for crypto subpackages + "github.com/shellhub-io/shellhub/agent/pkg/yescrypt" + "github.com/sirupsen/logrus" +) + +var ( + // Default file path for shadow file. + DefaultShadowFilename = "/etc/shadow" + // Default file path for passwd file. + DefaultPasswdFilename = "/etc/passwd" + // Default file path for group file. 
+ DefaultGroupFilename = "/etc/group" +) + +var DefaultBackend Backend + +type backend struct{} + +func (b *backend) AuthUser(username, password string) bool { + file, err := os.Open(DefaultShadowFilename) + if err != nil { + return false + } + defer file.Close() + + return AuthUserFromShadow(username, password, file) +} + +func (b *backend) LookupUser(username string) (*User, error) { + file, err := os.Open(DefaultPasswdFilename) + if err != nil { + return nil, err + } + defer file.Close() + + return LookupUserFromPasswd(username, file) +} + +func (b *backend) ListGroups(username string) ([]uint32, error) { + user, err := b.LookupUser(username) + if err != nil { + return nil, err + } + + groups := []uint32{user.GID} + + groupFile, err := os.Open(DefaultGroupFilename) + if err != nil { + return nil, err + } + defer groupFile.Close() + + secondaryGroups, err := ListGroupsFromFile(username, groupFile) + if err != nil { + return nil, err + } + + for _, gid := range secondaryGroups { + if gid != user.GID { + groups = append(groups, gid) + } + } + + return groups, nil +} + +func ListGroupsFromFile(username string, group io.Reader) ([]uint32, error) { + groups, err := parseGroupReader(group) + if err != nil { + logrus.WithError(err).Error("Error parsing group file") + + return nil, err + } + + var userGroups []uint32 + for _, g := range groups { + if slices.Contains(g.Members, username) { + userGroups = append(userGroups, g.GID) + } + } + + return userGroups, nil +} + +type Group struct { + Name string // The name of the group. + Password string // The password of the group. + GID uint32 // The group ID of the group. + Members []string // The list of members in the group. 
+} + +func parseGroupLine(line string) (Group, error) { + result := Group{} + parts := strings.Split(strings.TrimSpace(line), ":") + if len(parts) != 4 { + return result, fmt.Errorf("group line had wrong number of parts %d != 4", len(parts)) + } + result.Name = strings.TrimSpace(parts[0]) + result.Password = strings.TrimSpace(parts[1]) + + gid, err := parseUint32(parts[2]) + if err != nil { + return result, fmt.Errorf("group line had badly formatted gid %s", parts[2]) + } + result.GID = gid + + members := strings.TrimSpace(parts[3]) + if members != "" { + result.Members = strings.Split(members, ",") + } else { + result.Members = []string{} + } + + return result, nil +} + +func parseGroupReader(r io.Reader) (map[string]Group, error) { + lines := bufio.NewReader(r) + entries := make(map[string]Group) + for { + line, _, err := lines.ReadLine() + if err != nil { + break + } + + if len(line) == 0 || strings.HasPrefix(string(line), "#") { + continue + } + + entry, err := parseGroupLine(string(line)) + if err != nil { + return nil, err + } + + entries[entry.Name] = entry + } + + return entries, nil //nolint:nilerr +} + +func init() { + DefaultBackend = &backend{} +} + +// This struct represents an entry from the `/etc/shadow` file. +type shadowEntry struct { + // The login name of the account (same as in [PasswdEntry]). + Username string + // The hashed password of the account (same as in [PasswdEntry]). + Password string + // The number of days since January 1, 1970 (epoch) that the password was last changed. + Lastchanged int + // The minimum number of days required between password changes i.e. the number of days left before the user is + // allowed to change his/her password. + Minimum int + // The maximum number of days the password is valid (after that user is forced to change his/her password). + Maximum int + // The number of days before password is to expire that user is warned that his/her password must be changed. 
+ Warn int + // The number of days after password expires that account is disabled. + Inactive int + // Days since Jan 1, 1970 that account is disabled i.e. an absolute date specifying when the login may no longer be + // used. + Expire int +} + +// AuthUser attempts to authenticate username and password from [DefaultPasswdFilename]. +func AuthUser(username, password string) bool { + return DefaultBackend.AuthUser(username, password) +} + +// LookupUser try to find a [PasswordEntry] for a username from a [DefaultPasswdFilename]. +func LookupUser(username string) (*User, error) { + return DefaultBackend.LookupUser(username) +} + +// ListGroups returns a list of group IDs that the user belongs to. +func ListGroups(username string) ([]uint32, error) { + return DefaultBackend.ListGroups(username) +} + +// AuthUserFromShadow attempts to authenticate username and password from file. +func AuthUserFromShadow(username, password string, shadow io.Reader) bool { + entries, err := parseShadowReader(shadow) + if err != nil { + logrus.WithError(err).Debug("Error parsing shadow file") + + return false + } + + entry, ok := entries[username] + if !ok { + logrus.WithFields(logrus.Fields{ + "username": username, + }).Error("User not found") + + return false + } + + return VerifyPasswordHash(entry.Password, password) +} + +// LookupUserFromPasswd try to find a [PasswordEntry] for a username from a passwd file. +func LookupUserFromPasswd(username string, passwd io.Reader) (*User, error) { + if os.Geteuid() != 0 { + return singleUser(), nil + } + + entries, err := parsePasswdReader(passwd) + if err != nil { + logrus.WithError(err).Error("Error parsing passwd file") + + return nil, err + } + + user, found := entries[username] + if !found { + logrus.WithFields(logrus.Fields{ + "username": username, + }).Error("User not found in passwd file") + + return nil, ErrUserNotFound + } + + return &user, nil +} + +// VerifyPasswordHash checks if the password match with the hash. 
+func VerifyPasswordHash(hash, password string) bool { + // NOTE: If the password field is empty, the user can log in without a password. However, some applications that + // read the /etc/shadow file might block access if the password field is empty. + // https://man7.org/linux/man-pages/man5/shadow.5.html + if hash == "" { + if PermitEmptyPasswords() { + // NOTE: We allow login with empty password if the environment variable SHELLHUB_PERMIT_EMPTY_PASSWORDS is set to true. + logrus.Warn("User logged in with empty password") + + return true + } + + // NOTE: By default, we dont allow login with empty password. + logrus.Error("User cannot login with empty password") + + return false + } + + // NOTE: If the password field contains a string that is not a valid result of crypt(3), for instance ! or *, the + // user cannot use a UNIX password to log in. However, the user may log in the system by other means. + // https://man7.org/linux/man-pages/man5/shadow.5.html + if hash == "!" || hash == "*" { + logrus.Error("User cannot login with password") + + return false + } + + // NOTE: If the password field begins with an exclamation mark !, the password is locked. The remaining characters + // on the line represent the password field before the password was locked. + // https://man7.org/linux/man-pages/man5/shadow.5.html + if strings.HasPrefix(hash, "!") { + logrus.Error("Password is locked") + + return false + } + + // NOTE: If hash algorithm is yescrypt, we verify by ourselves, otherwise let's try crypt package. 
+ if strings.HasPrefix(hash, "$y$") { + return yescrypt.Verify(password, hash) + } + + if ok := crypt.IsHashSupported(hash); !ok { + logrus.Error("The crypto algorithm is not supported") + + return false + } + + crypt := crypt.NewFromHash(hash) + if crypt == nil { + logrus.Error("Could not detect password crypto algorithm from shadow entry") + + return false + } + + if err := crypt.Verify(hash, []byte(password)); err != nil { + logrus.WithError(err).Debug("Error verifying password hash") + + return false + } + + return true +} + +func parseShadowReader(r io.Reader) (map[string]shadowEntry, error) { + lines := bufio.NewReader(r) + entries := make(map[string]shadowEntry) + + for { + line, _, err := lines.ReadLine() + if err != nil { + break + } + + if len(line) == 0 || strings.HasPrefix(string(line), "#") { + continue + } + + entry, err := parseShadowLine(string(line)) + if err != nil { + return nil, err + } + + entries[entry.Username] = entry + } + + return entries, nil //nolint:nilerr +} + +func parseShadowLine(line string) (shadowEntry, error) { + result := shadowEntry{} + parts := strings.Split(strings.TrimSpace(line), ":") + if len(parts) != 9 { + return result, fmt.Errorf("shadow line had wrong number of parts %d != 9", len(parts)) + } + + result.Username = strings.TrimSpace(parts[0]) + result.Password = strings.TrimSpace(parts[1]) + + result.Lastchanged = parseIntString(parts[2]) + result.Minimum = parseIntString(parts[3]) + result.Maximum = parseIntString(parts[4]) + result.Warn = parseIntString(parts[5]) + result.Inactive = parseIntString(parts[6]) + result.Expire = parseIntString(parts[7]) + + return result, nil +} + +func parseIntString(value string) int { + if value == "" { + return 0 + } + + number, err := strconv.Atoi(strings.TrimSpace(value)) + if err != nil { + return 0 + } + + return number +} + +func parseUint32(value string) (uint32, error) { + // NOTE: [strconv.Atoi] uses the [strconv.ParseInt] under the hood to do the conversion. 
+ parsed, err := strconv.ParseUint(value, 10, 32) + if err != nil { + return 0, err + } + + return uint32(parsed), nil //nolint:gosec // ParseUint with the parameters specified guarantee the parsing of only 32 bits. +} + +func singleUser() *User { + var uid, gid uint32 + var username, name, homeDir, shell string + u, err := user.Current() + uid, _ = parseUint32(os.Getenv("UID")) + homeDir = os.Getenv("HOME") + shell = os.Getenv("SHELL") + if err == nil { + uid, _ = parseUint32(u.Uid) + gid, _ = parseUint32(u.Gid) + username = u.Username + name = u.Name + homeDir = u.HomeDir + } + + return &User{ + UID: uid, + GID: gid, + Username: username, + Name: name, + HomeDir: homeDir, + Shell: shell, + } +} + +func parsePasswdReader(r io.Reader) (map[string]User, error) { + lines := bufio.NewReader(r) + entries := make(map[string]User) + for { + line, _, err := lines.ReadLine() + if err != nil { + break + } + + if len(line) == 0 || strings.HasPrefix(string(line), "#") { + continue + } + + entry, err := parsePasswdLine(string(line)) + if err != nil { + return nil, err + } + + entries[entry.Username] = entry + } + + return entries, nil //nolint:nilerr +} + +func parsePasswdLine(line string) (User, error) { + result := User{} + parts := strings.Split(strings.TrimSpace(line), ":") + if len(parts) != 7 { + return result, fmt.Errorf("passwd line had wrong number of parts %d != 7", len(parts)) + } + result.Username = strings.TrimSpace(parts[0]) + result.Password = strings.TrimSpace(parts[1]) + + uid, err := parseUint32(parts[2]) + if err != nil { + return result, fmt.Errorf("passwd line had badly formatted uid %s", parts[2]) + } + result.UID = uid + + gid, err := parseUint32(parts[3]) + if err != nil { + return result, fmt.Errorf("passwd line had badly formatted gid %s", parts[3]) + } + result.GID = gid + + result.Name = strings.TrimSpace(parts[4]) + result.HomeDir = strings.TrimSpace(parts[5]) + result.Shell = strings.TrimSpace(parts[6]) + + return result, nil +} diff --git 
a/agent/pkg/osauth/auth_freebsd.go b/agent/pkg/osauth/auth_freebsd.go new file mode 100644 index 00000000000..d3efc58c30b --- /dev/null +++ b/agent/pkg/osauth/auth_freebsd.go @@ -0,0 +1,197 @@ +//go:build freebsd +// +build freebsd + +package osauth + +import ( + "bufio" + "fmt" + "io" + "os" + "strconv" + "strings" + + "github.com/GehirnInc/crypt" + _ "github.com/GehirnInc/crypt/md5_crypt" // GehirnInc/crypt uses blank imports for crypto subpackages + _ "github.com/GehirnInc/crypt/sha256_crypt" // GehirnInc/crypt uses blank imports for crypto subpackages + _ "github.com/GehirnInc/crypt/sha512_crypt" // GehirnInc/crypt uses blank imports for crypto subpackages + log "github.com/sirupsen/logrus" +) + +var DefaultMasterPasswdFilename = "/etc/master.passwd" + +var DefaultBackend Backend + +type backend struct{} + +func (b *backend) AuthUser(username, password string) bool { + file, err := os.Open(DefaultMasterPasswdFilename) + if err != nil { + return false + } + + return AuthUserFromShadow(username, password, file) +} + +func (b *backend) LookupUser(username string) (*User, error) { + file, err := os.Open(DefaultMasterPasswdFilename) + if err != nil { + return nil, err + } + + return LookupUserFromPasswd(username, file) +} + +func (b *backend) ListGroups(username string) ([]string, error) { + // Not implemented on FreeBSD + return nil, nil +} + +func init() { + DefaultBackend = &backend{} +} + +// AuthUser attempts to authenticate username and password from the [DefaultMasterPasswdFilename]. +func AuthUser(username, password string) bool { + return DefaultBackend.AuthUser(username, password) +} + +// LookupUser try to find a [PasswordEntry] for a username from the [DefaultMasterPasswdFilename]. +func LookupUser(username string) (*User, error) { + return DefaultBackend.LookupUser(username) +} + +// AuthUserFromShadow attempts to authenticate username and password from file. 
+func AuthUserFromShadow(username, password string, shadow io.Reader) bool { + entries, err := parseMasterPasswdReader(shadow) + if err != nil { + log.WithError(err).Error("Error parsing passwd file") + + return false + } + + user, found := entries[username] + if !found { + log.WithFields(log.Fields{ + "username": username, + }).Error("User not found in passwd file") + + return false + } + + return VerifyPasswordHash(user.Password, password) +} + +// Lookup try to find a [PasswordEntry] for a username from a passwd file. +func LookupUserFromPasswd(username string, passwd io.Reader) (*User, error) { + entries, err := parseMasterPasswdReader(passwd) + if err != nil { + log.WithError(err).Error("Error parsing passwd file") + + return nil, err + } + + user, found := entries[username] + if !found { + log.WithFields(log.Fields{ + "username": username, + }).Error("User not found in passwd file") + + return nil, ErrUserNotFound + } + + return &user, nil +} + +func parseMasterPasswdReader(r io.Reader) (map[string]User, error) { + lines := bufio.NewReader(r) + entries := make(map[string]User) + for { + line, _, err := lines.ReadLine() + if err != nil { + break + } + + if len(line) == 0 || strings.HasPrefix(string(line), "#") { + continue + } + + entry, err := parseMasterPasswdLine(string(line)) + if err != nil { + return nil, err + } + + entries[entry.Username] = entry + } + + return entries, nil //nolint:nilerr +} + +func parseMasterPasswdLine(line string) (User, error) { + result := User{} + parts := strings.Split(strings.TrimSpace(line), ":") + if len(parts) != 10 { + return result, fmt.Errorf("passwd line had wrong number of parts %d != 10", len(parts)) + } + result.Username = strings.TrimSpace(parts[0]) + result.Password = strings.TrimSpace(parts[1]) + + uid, err := strconv.Atoi(parts[2]) + if err != nil { + return result, fmt.Errorf("passwd line had badly formatted uid %s", parts[2]) + } + result.UID = uint32(uid) + + gid, err := strconv.Atoi(parts[3]) + if err != nil 
{ + return result, fmt.Errorf("passwd line had badly formatted gid %s", parts[3]) + } + result.GID = uint32(gid) + + // result.Name = strings.TrimSpace(parts[4]) + + result.HomeDir = strings.TrimSpace(parts[8]) + result.Shell = strings.TrimSpace(parts[9]) + + return result, nil +} + +// VerifyPasswordHash checks if the password match with the hash. +func VerifyPasswordHash(hash, password string) bool { + if hash == "" && password == "" { + return true + } + + if password == "" && (hash == "*LOCKED*" || hash == "*") { + log.Error("Password is locked") + + return false + } + + if hash != "" && password == "" { + log.Error("Password entry is empty") + + return false + } + + if ok := crypt.IsHashSupported(hash); !ok { + log.Error("The crypto algorithm is not supported") + + return false + } + + crypt := crypt.NewFromHash(hash) + if crypt == nil { + log.Error("Could not detect password crypto algorithm from shadow entry") + + return false + } + + if err := crypt.Verify(hash, []byte(password)); err != nil { + log.WithError(err).Debug("Error verifying password hash") + + return false + } + + return true +} diff --git a/agent/pkg/osauth/auth_test.go b/agent/pkg/osauth/auth_test.go new file mode 100644 index 00000000000..797916ac87e --- /dev/null +++ b/agent/pkg/osauth/auth_test.go @@ -0,0 +1,404 @@ +//go:build !freebsd +// +build !freebsd + +package osauth + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestVerifyPasswordHashPass(t *testing.T) { + hashPassword := "$6$CMWxpgkq.ZosUW8N$gN/MkheCdS9SsPrFS6oOd/k.TMvY2KHztJE5pDMRdN35zr00dyxQr3pYGM4rtPPduUIrEFCwuB7oVgzDbiMfN." //nolint:gosec + passwd := "123" + + result := VerifyPasswordHash(hashPassword, passwd) + + assert.True(t, result) +} + +func TestVerifyPasswordHashFail(t *testing.T) { + hashPassword := "$6$CMWxpgkq.ZosUW8N$gN/MkheCdS9SsPrFS6oOd/k.TMvY2KHztJE5pDMRdN35zr00dyxQr3pYGM4rtPPduUIrEFCwuB7oVgzDbiMfN." 
//nolint:gosec + passwd := "test" + + result := VerifyPasswordHash(hashPassword, passwd) + + assert.False(t, result) +} + +func TestVerifyPasswordHashMD5Pass(t *testing.T) { + hashPassword := "$1$YW4a91HG$31CtH9bzW/oyJ1VOD.H/d/" //nolint:gosec + passwd := "test" + + result := VerifyPasswordHash(hashPassword, passwd) + + assert.True(t, result) +} + +func TestVerifyPasswordHash(t *testing.T) { + tests := []struct { + name string + hash string + password string + want bool + }{ + { + name: "sha512 correct", + hash: "$6$CMWxpgkq.ZosUW8N$gN/MkheCdS9SsPrFS6oOd/k.TMvY2KHztJE5pDMRdN35zr00dyxQr3pYGM4rtPPduUIrEFCwuB7oVgzDbiMfN.", //nolint:gosec + password: "123", + want: true, + }, + { + name: "sha512 incorrect", + hash: "$6$CMWxpgkq.ZosUW8N$gN/MkheCdS9SsPrFS6oOd/k.TMvY2KHztJE5pDMRdN35zr00dyxQr3pYGM4rtPPduUIrEFCwuB7oVgzDbiMfN.", //nolint:gosec + password: "test", + want: false, + }, + { + name: "md5 correct", + hash: "$1$YW4a91HG$31CtH9bzW/oyJ1VOD.H/d/", //nolint:gosec + password: "test", + want: true, + }, + { + name: "empty hash", + hash: "", + password: "any", + want: false, + }, + { + name: "special marker bang", + hash: "!", + password: "pass", + want: false, + }, + { + name: "special marker star", + hash: "*", + password: "pass", + want: false, + }, + { + name: "locked prefix", + hash: "!$6$blah", + password: "pass", + want: false, + }, + { + name: "unsupported algo", + hash: "$z$invalid$hash", + password: "pass", + want: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := VerifyPasswordHash(tt.hash, tt.password) + assert.Equal(t, tt.want, got) + }) + } +} + +func TestVerifyPasswordHash_YescryptInvocation(t *testing.T) { + // NOTE: This test only ensures the yescrypt branch executes without panicking. + // Avoid asserting true/false because yescrypt parameters may vary across + // environments and producing a deterministic yescrypt hash in tests is + // environment-dependent. 
+ yesHash := "$y$e0801$w1Jl9GJH1j4h0w==$Wj2b7m2vWw2m3l1iQe8qvQ==" + _ = VerifyPasswordHash(yesHash, "password") +} + +// nolint:gosec +const passwd = `root:x:0:0:root:/root:/bin/bash +daemon:x:1:1:daemon:/usr/sbin:/usr/sbin/nologin +bin:x:2:2:bin:/bin:/usr/sbin/nologin +sys:x:3:3:sys:/dev:/usr/sbin/nologin +sync:x:4:65534:sync:/bin:/bin/sync +nobody:x:65534:65534:nobody:/nonexistent:/usr/sbin/nologin +user1:x:1000:1000:User One:/home/user1:/bin/bash +user2:x:1001:1001:User Two:/home/user2:/bin/sh` + +func TestPasswdReader(t *testing.T) { + reader := strings.NewReader(passwd) + + users, err := parsePasswdReader(reader) + assert.NoError(t, err) + assert.Equal(t, 8, len(users)) + + tests := []struct { + name string + username string + want struct { + Password string + UID uint32 + GID uint32 + Shell string + } + }{ + { + name: "root user", + username: "root", + want: struct { + Password string + UID uint32 + GID uint32 + Shell string + }{ + Password: "x", + UID: 0, + GID: 0, + Shell: "/bin/bash", + }, + }, + { + name: "user1", + username: "user1", + want: struct { + Password string + UID uint32 + GID uint32 + Shell string + }{ + Password: "x", + UID: 1000, + GID: 1000, + Shell: "/bin/bash", + }, + }, + { + name: "user2", + username: "user2", + want: struct { + Password string + UID uint32 + GID uint32 + Shell string + }{ + Password: "x", + UID: 1001, + GID: 1001, + Shell: "/bin/sh", + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + user, ok := users[tt.username] + assert.True(t, ok) + assert.Equal(t, tt.want.Password, user.Password) + assert.Equal(t, tt.want.UID, user.UID) + assert.Equal(t, tt.want.GID, user.GID) + assert.Equal(t, tt.want.Shell, user.Shell) + }) + } +} + +func TestParseIntString(t *testing.T) { + tests := []struct { + name string + input string + want int + }{ + { + name: "empty string", + input: "", + want: 0, + }, + { + name: "whitespace only", + input: " ", + want: 0, + }, + { + name: "valid integer", + input: 
"42", + want: 42, + }, + { + name: "valid with surrounding spaces", + input: " 7 ", + want: 7, + }, + { + name: "negative integer", + input: "-3", + want: -3, + }, + { + name: "plus sign", + input: "+5", + want: 5, + }, + { + name: "non-numeric", + input: "abc", + want: 0, + }, + { + name: "mixed numeric and alpha", + input: "12abc", + want: 0, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := parseIntString(tt.input) + assert.Equal(t, tt.want, got) + }) + } +} + +// nolint:gosec +const groups = `root:x:0:root +wheel:x:10:root,user1 +staff:x:50:user2 +nogroup:x:65534:` + +func TestParseGroupLine(t *testing.T) { + tests := []struct { + name string + input string + want Group + ok bool + }{ + { + name: "simple group with members", + input: "wheel:x:10:root,user1", + want: Group{ + Name: "wheel", + Password: "x", + GID: 10, + Members: []string{"root", "user1"}, + }, + ok: true, + }, + { + name: "group without members", + input: "nogroup:x:65534:", + want: Group{ + Name: "nogroup", + Password: "x", + GID: 65534, + Members: []string{}, + }, + ok: true, + }, + { + name: "invalid parts", + input: "badline:too:many:parts:here", + ok: false, + }, + { + name: "bad gid", + input: "g:x:badgid:member", + ok: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := parseGroupLine(tt.input) + if !tt.ok { + assert.Error(t, err) + + return + } + + assert.NoError(t, err) + assert.Equal(t, tt.want.Name, got.Name) + assert.Equal(t, tt.want.Password, got.Password) + assert.Equal(t, tt.want.GID, got.GID) + assert.Equal(t, tt.want.Members, got.Members) + }) + } +} + +func TestParseGroupReader(t *testing.T) { + tests := []struct { + name string + data string + wantCount int + wantGID uint32 + wantParts []string + }{ + { + name: "default groups", + data: groups, + wantCount: 4, + wantGID: 10, + wantParts: []string{"root", "user1"}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + 
reader := strings.NewReader(tt.data) + m, err := parseGroupReader(reader) + assert.NoError(t, err) + assert.Equal(t, tt.wantCount, len(m)) + + g, ok := m["wheel"] + assert.True(t, ok) + assert.Equal(t, tt.wantGID, g.GID) + assert.Equal(t, tt.wantParts, g.Members) + }) + } +} + +func TestListGroupsFromFile(t *testing.T) { + tests := []struct { + name string + username string + wantFound []uint32 + }{ + {name: "user1 belongs to wheel", username: "user1", wantFound: []uint32{10}}, + {name: "user2 belongs to staff", username: "user2", wantFound: []uint32{50}}, + {name: "no groups for missing user", username: "unknown", wantFound: []uint32{}}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + reader := strings.NewReader(groups) + got, err := ListGroupsFromFile(tt.username, reader) + assert.NoError(t, err) + + for _, want := range tt.wantFound { + assert.Contains(t, got, want) + } + if len(tt.wantFound) == 0 { + assert.Equal(t, 0, len(got)) + } + }) + } +} + +func TestParseUint32(t *testing.T) { + tests := []struct { + name string + input string + want uint32 + wantErr bool + }{ + {name: "valid", input: "1000", want: 1000, wantErr: false}, + {name: "invalid", input: "notanumber", wantErr: true}, + {name: "empty", input: "", wantErr: true}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := parseUint32(tt.input) + if tt.wantErr { + assert.Error(t, err) + + return + } + assert.NoError(t, err) + assert.Equal(t, tt.want, got) + }) + } +} diff --git a/agent/pkg/osauth/backend.go b/agent/pkg/osauth/backend.go new file mode 100644 index 00000000000..25f8295e8c8 --- /dev/null +++ b/agent/pkg/osauth/backend.go @@ -0,0 +1,8 @@ +package osauth + +//go:generate mockery --name=Backend --filename=backend.go +type Backend interface { + AuthUser(username, password string) bool + LookupUser(username string) (*User, error) + ListGroups(username string) ([]uint32, error) +} diff --git a/agent/pkg/osauth/errors.go 
b/agent/pkg/osauth/errors.go new file mode 100644 index 00000000000..f39efefbe15 --- /dev/null +++ b/agent/pkg/osauth/errors.go @@ -0,0 +1,6 @@ +package osauth + +import "errors" + +// ErrUserNotFound is returned when the user is not found in the passwd file. +var ErrUserNotFound = errors.New("user not found") diff --git a/agent/pkg/osauth/init_docker.go b/agent/pkg/osauth/init_docker.go new file mode 100644 index 00000000000..9952b18d474 --- /dev/null +++ b/agent/pkg/osauth/init_docker.go @@ -0,0 +1,10 @@ +//go:build docker +// +build docker + +package osauth + +func init() { + DefaultPasswdFilename = "/host/etc/passwd" //nolint: gosec + DefaultShadowFilename = "/host/etc/shadow" + DefaultGroupFilename = "/host/etc/group" +} diff --git a/agent/pkg/osauth/mocks/backend.go b/agent/pkg/osauth/mocks/backend.go new file mode 100644 index 00000000000..3ba85ac8454 --- /dev/null +++ b/agent/pkg/osauth/mocks/backend.go @@ -0,0 +1,105 @@ +// Code generated by mockery v2.53.3. DO NOT EDIT. + +package mocks + +import ( + osauth "github.com/shellhub-io/shellhub/agent/pkg/osauth" + mock "github.com/stretchr/testify/mock" +) + +// Backend is an autogenerated mock type for the Backend type +type Backend struct { + mock.Mock +} + +// AuthUser provides a mock function with given fields: username, password +func (_m *Backend) AuthUser(username string, password string) bool { + ret := _m.Called(username, password) + + if len(ret) == 0 { + panic("no return value specified for AuthUser") + } + + var r0 bool + if rf, ok := ret.Get(0).(func(string, string) bool); ok { + r0 = rf(username, password) + } else { + r0 = ret.Get(0).(bool) + } + + return r0 +} + +// ListGroups provides a mock function with given fields: username +func (_m *Backend) ListGroups(username string) ([]uint32, error) { + ret := _m.Called(username) + + if len(ret) == 0 { + panic("no return value specified for ListGroups") + } + + var r0 []uint32 + var r1 error + if rf, ok := ret.Get(0).(func(string) ([]uint32, error)); 
ok { + return rf(username) + } + if rf, ok := ret.Get(0).(func(string) []uint32); ok { + r0 = rf(username) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]uint32) + } + } + + if rf, ok := ret.Get(1).(func(string) error); ok { + r1 = rf(username) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// LookupUser provides a mock function with given fields: username +func (_m *Backend) LookupUser(username string) (*osauth.User, error) { + ret := _m.Called(username) + + if len(ret) == 0 { + panic("no return value specified for LookupUser") + } + + var r0 *osauth.User + var r1 error + if rf, ok := ret.Get(0).(func(string) (*osauth.User, error)); ok { + return rf(username) + } + if rf, ok := ret.Get(0).(func(string) *osauth.User); ok { + r0 = rf(username) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*osauth.User) + } + } + + if rf, ok := ret.Get(1).(func(string) error); ok { + r1 = rf(username) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// NewBackend creates a new instance of Backend. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewBackend(t interface { + mock.TestingT + Cleanup(func()) +}) *Backend { + mock := &Backend{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/agent/pkg/osauth/utils.go b/agent/pkg/osauth/utils.go new file mode 100644 index 00000000000..99af880fb63 --- /dev/null +++ b/agent/pkg/osauth/utils.go @@ -0,0 +1,26 @@ +package osauth + +import ( + "os" + "strings" +) + +type User struct { + UID uint32 // The user ID of the account. + GID uint32 // The group ID of the account. + Username string // The login name of the account. + Password string // The hashed password of the account. + Name string // The full name of the account owner. + HomeDir string // The home directory path of the account. 
+ Shell string // The default login shell for the account. +} + +// PermitEmptyPasswords checks if the environment variable +// SHELLHUB_PERMIT_EMPTY_PASSWORDS is set to true. If so, it returns true, +// allowing empty passwords. Otherwise, it returns false. +func PermitEmptyPasswords() bool { + // TODO: Consider reading this configuration from the main application's function + // and passing it down to the osauth package. + // TODO: Consider caching the result to avoid repeated environment variable lookups. + return strings.EqualFold(os.Getenv("SHELLHUB_PERMIT_EMPTY_PASSWORDS"), "true") +} diff --git a/pkg/agent/pkg/selfupdater/updater.go b/agent/pkg/selfupdater/updater.go similarity index 100% rename from pkg/agent/pkg/selfupdater/updater.go rename to agent/pkg/selfupdater/updater.go diff --git a/pkg/agent/pkg/selfupdater/updater_docker.go b/agent/pkg/selfupdater/updater_docker.go similarity index 90% rename from pkg/agent/pkg/selfupdater/updater_docker.go rename to agent/pkg/selfupdater/updater_docker.go index 698f41a716a..ceb2e2303aa 100644 --- a/pkg/agent/pkg/selfupdater/updater_docker.go +++ b/agent/pkg/selfupdater/updater_docker.go @@ -11,8 +11,8 @@ import ( "strings" "github.com/Masterminds/semver" - "github.com/docker/docker/api/types" containertypes "github.com/docker/docker/api/types/container" + dockerimage "github.com/docker/docker/api/types/image" "github.com/docker/docker/api/types/mount" "github.com/docker/docker/api/types/network" "github.com/docker/docker/client" @@ -20,7 +20,7 @@ import ( ) type dockerContainer struct { - info *types.ContainerJSON + info *containertypes.InspectResponse } func (c *dockerContainer) splitImageVersion() (image, version string) { @@ -35,7 +35,7 @@ func (c *dockerContainer) splitImageVersion() (image, version string) { } type dockerUpdater struct { - api client.CommonAPIClient + api client.APIClient } func (d *dockerUpdater) CurrentVersion() (*semver.Version, error) { @@ -80,6 +80,10 @@ func (d *dockerUpdater) 
CompleteUpdate() error { return err } + if err := d.removeContainer(parent); err != nil { + return err + } + _, pv := parent.splitImageVersion() v, _ := semver.NewVersion(pv) v0_4_0, _ := semver.NewVersion("v0.4.0") @@ -134,7 +138,7 @@ func (d *dockerUpdater) CompleteUpdate() error { return err } - if err := d.stopContainer(container); err != nil { + if err := d.removeContainer(container); err != nil { return err } @@ -178,10 +182,15 @@ func (d *dockerUpdater) stopContainer(container *dockerContainer) error { return err } - opts := types.ContainerRemoveOptions{Force: true, RemoveVolumes: true} - err := d.api.ContainerRemove(ctx, container.info.ID, opts) + return nil +} - return err +func (d *dockerUpdater) removeContainer(container *dockerContainer) error { + ctx := context.Background() + + opts := containertypes.RemoveOptions{Force: true, RemoveVolumes: true} + + return d.api.ContainerRemove(ctx, container.info.ID, opts) } func (d *dockerUpdater) updateContainer(container *dockerContainer, image, name string, parent bool) (*dockerContainer, error) { //nolint:unparam @@ -198,7 +207,7 @@ func (d *dockerUpdater) updateContainer(container *dockerContainer, image, name netConfig := &network.NetworkingConfig{EndpointsConfig: container.info.NetworkSettings.Networks} - rd, err := d.api.ImagePull(ctx, image, types.ImagePullOptions{}) + rd, err := d.api.ImagePull(ctx, image, dockerimage.PullOptions{}) if err != nil { return nil, err } @@ -216,7 +225,7 @@ func (d *dockerUpdater) updateContainer(container *dockerContainer, image, name return nil, err } - if err := d.api.ContainerStart(ctx, clone.ID, types.ContainerStartOptions{}); err != nil { + if err := d.api.ContainerStart(ctx, clone.ID, containertypes.StartOptions{}); err != nil { return nil, err } diff --git a/pkg/agent/pkg/selfupdater/updater_native.go b/agent/pkg/selfupdater/updater_native.go similarity index 100% rename from pkg/agent/pkg/selfupdater/updater_native.go rename to agent/pkg/selfupdater/updater_native.go 
diff --git a/agent/pkg/sysinfo/errors.go b/agent/pkg/sysinfo/errors.go new file mode 100644 index 00000000000..7feac49fd8e --- /dev/null +++ b/agent/pkg/sysinfo/errors.go @@ -0,0 +1,5 @@ +package sysinfo + +import "errors" + +var ErrNoInterfaceFound = errors.New("no interface found") diff --git a/pkg/agent/pkg/sysinfo/network.go b/agent/pkg/sysinfo/network.go similarity index 88% rename from pkg/agent/pkg/sysinfo/network.go rename to agent/pkg/sysinfo/network.go index 708aefc29c0..d4cc647f2bb 100644 --- a/pkg/agent/pkg/sysinfo/network.go +++ b/agent/pkg/sysinfo/network.go @@ -1,7 +1,9 @@ +//go:build !freebsd +// +build !freebsd + package sysinfo import ( - "errors" "math" "net" "os" @@ -11,8 +13,6 @@ import ( "syscall" ) -var ErrNoInterfaceFound = errors.New("no interface found") - func PrimaryInterface() (*net.Interface, error) { interfaces, err := net.Interfaces() if err != nil { @@ -20,7 +20,7 @@ func PrimaryInterface() (*net.Interface, error) { } var ifdev *net.Interface - min := uint64(math.MaxUint16) + minValue := uint64(math.MaxUint16) for i, iface := range interfaces { if iface.Flags&net.FlagLoopback > 0 { @@ -51,8 +51,8 @@ func PrimaryInterface() (*net.Interface, error) { continue } - if ifindex < min { - min = ifindex + if ifindex < minValue { + minValue = ifindex ifdev = &interfaces[i] } } diff --git a/agent/pkg/sysinfo/network_freebsd.go b/agent/pkg/sysinfo/network_freebsd.go new file mode 100644 index 00000000000..fed8c08fe02 --- /dev/null +++ b/agent/pkg/sysinfo/network_freebsd.go @@ -0,0 +1,35 @@ +//go:build freebsd +// +build freebsd + +package sysinfo + +import ( + "net" +) + +func PrimaryInterface() (*net.Interface, error) { + interfaces, err := net.Interfaces() + if err != nil { + return nil, ErrNoInterfaceFound + } + + var ifdev *net.Interface + + for i, iface := range interfaces { + if iface.Flags&net.FlagLoopback > 0 { + continue + } + + if iface.Flags&net.FlagRunning > 0 { + ifdev = &interfaces[i] + + break + } + } + + if ifdev == nil { + 
return nil, ErrNoInterfaceFound + } + + return ifdev, nil +} diff --git a/pkg/agent/pkg/sysinfo/osrelease.go b/agent/pkg/sysinfo/osrelease.go similarity index 100% rename from pkg/agent/pkg/sysinfo/osrelease.go rename to agent/pkg/sysinfo/osrelease.go diff --git a/agent/pkg/tunnel/context.go b/agent/pkg/tunnel/context.go new file mode 100644 index 00000000000..820730ee6a0 --- /dev/null +++ b/agent/pkg/tunnel/context.go @@ -0,0 +1,79 @@ +package tunnel + +import ( + "context" + "encoding/json" + "errors" + "io" + "time" + + log "github.com/sirupsen/logrus" +) + +type Context struct { + ctx context.Context + + encoder *json.Encoder + decoder *json.Decoder +} + +func (c Context) Deadline() (deadline time.Time, ok bool) { + return c.ctx.Deadline() +} + +func (c Context) Done() <-chan struct{} { + return c.ctx.Done() +} + +func (c Context) Err() error { + return c.ctx.Err() +} + +func (c Context) Value(key any) any { + return c.ctx.Value(key) +} + +func (c Context) Status(status string) error { + if err := c.encoder.Encode(map[string]string{"status": status}); err != nil { + log.WithError(err).Error("failed to send status response") + + return errors.Join(errors.New("failed to send status response"), err) + } + + return nil +} + +func (c Context) Error(err error) error { + if err := c.encoder.Encode(map[string]string{"error": err.Error()}); err != nil { + log.WithError(err).Error("failed to send error response") + + return errors.Join(errors.New("failed to send error response"), err) + } + + return nil +} + +type Headers map[string]string + +func (c Context) Headers() (Headers, error) { + // TODO: cache the headers after the first call. 
+ var header Headers + + if err := c.decoder.Decode(&header); err != nil { + log.WithError(err).Error("failed to decode the header") + + return nil, err + } + + return header, nil +} + +func NewContext(ctx context.Context, rwc io.ReadWriteCloser) Context { + return Context{ + ctx: ctx, + encoder: json.NewEncoder(rwc), + decoder: json.NewDecoder(rwc), + } +} + +type HandlerFunc func(ctx Context, rwc io.ReadWriteCloser) error diff --git a/agent/pkg/tunnel/tunnel.go b/agent/pkg/tunnel/tunnel.go new file mode 100644 index 00000000000..8f8f45ba712 --- /dev/null +++ b/agent/pkg/tunnel/tunnel.go @@ -0,0 +1,137 @@ +package tunnel + +import ( + "context" + "errors" + "io" + "net" + "net/http" + "strings" + + "github.com/gorilla/websocket" + "github.com/labstack/echo/v4" + "github.com/multiformats/go-multistream" + "github.com/shellhub-io/shellhub/pkg/api/client" + log "github.com/sirupsen/logrus" +) + +type HandlerConstraint interface { + echo.HandlerFunc | HandlerFunc +} + +type Tunnel[H HandlerConstraint] interface { + Handle(protocol string, handler H) + Listen(ctx context.Context, listener net.Listener) error + Close() error +} + +type TunnelV2 struct { + mux *multistream.MultistreamMuxer[string] + cli client.Client + listener net.Listener +} + +func NewTunnelV2(cli client.Client) Tunnel[HandlerFunc] { + return &TunnelV2{ + mux: multistream.NewMultistreamMuxer[string](), + cli: cli, + } +} + +func (t *TunnelV2) Handle(protocol string, handler HandlerFunc) { + t.mux.AddHandler(protocol, func(protocol string, rwc io.ReadWriteCloser) error { + log.WithField("protocol", protocol).Debug("handling connection") + defer log.WithField("protocol", protocol).Debug("handling connection closed") + + // TODO: Should we receive a context from outside? 
+ return handler(NewContext(context.TODO(), rwc), rwc) + }) +} + +func (t *TunnelV2) Listen(ctx context.Context, listener net.Listener) error { + t.listener = listener + + for { + stream, err := listener.Accept() + if err != nil { + log.WithError(err).Trace("failed to accept stream") + + switch { + case websocket.IsCloseError(err, websocket.CloseAbnormalClosure): + return errors.Join(ErrTunnelDisconnect, err) + } + + return err + } + + log.Trace("new stream accepted") + + go func() { + log.Trace("handling stream") + + if err := t.mux.Handle(stream); err != nil { + log.WithError(err).Trace("failed to handle stream") + + _ = stream.Close() + } + + log.Trace("stream handled") + }() + } +} + +// Close implements Tunnel. +func (t *TunnelV2) Close() error { + return t.listener.Close() +} + +// ErrTunnelDisconnect is returned when the tunnel connection is closed. +var ErrTunnelDisconnect = errors.New("tunnel disconnected") + +type TunnelV1 struct { + router *echo.Echo + srv *http.Server +} + +func NewTunnelV1() *TunnelV1 { + e := echo.New() + + t := &TunnelV1{ + router: e, + srv: &http.Server{ //nolint:gosec + Handler: e, + ConnContext: func(ctx context.Context, c net.Conn) context.Context { + // TODO: Create a constant for the key. 
+ return context.WithValue(ctx, "http-conn", c) //nolint:revive + }, + }, + } + + return t +} + +func (t *TunnelV1) Handle(protocol string, handler echo.HandlerFunc) { + parts := strings.SplitN(protocol, "://", 2) + + method := parts[0] + path := parts[1] + + t.router.Add(method, path, func(c echo.Context) error { + log.WithField("protocol", protocol).Debug("handling connection") + defer log.WithField("protocol", protocol).Debug("handling connection closed") + + return handler(c) + }) +} + +func (t *TunnelV1) Listen(ctx context.Context, listener net.Listener) error { + return t.srv.Serve(listener) +} + +func (t *TunnelV1) Close() error { + if err := t.router.Close(); err != nil { + return err + } + + return t.srv.Close() +} diff --git a/agent/pkg/yescrypt/yescrypt.go b/agent/pkg/yescrypt/yescrypt.go new file mode 100644 index 00000000000..355b42eed48 --- /dev/null +++ b/agent/pkg/yescrypt/yescrypt.go @@ -0,0 +1,18 @@ +package yescrypt + +import ( + yescrypt "github.com/openwall/yescrypt-go" + log "github.com/sirupsen/logrus" +) + +// Verify verifies a yescrypt hash against a given key. 
+func Verify(password, hash string) bool { + hashed, err := yescrypt.Hash([]byte(password), []byte(hash)) + if err != nil { + log.WithError(err).Debug("failed to hash the password for comparison") + + return false + } + + return hash == string(hashed) +} diff --git a/agent/pkg/yescrypt/yescrypt_test.go b/agent/pkg/yescrypt/yescrypt_test.go new file mode 100644 index 00000000000..431e39ed59b --- /dev/null +++ b/agent/pkg/yescrypt/yescrypt_test.go @@ -0,0 +1,68 @@ +package yescrypt + +import ( + "crypto/rand" + "math/big" + "testing" + + "github.com/openwall/yescrypt-go" + "github.com/stretchr/testify/assert" +) + +func FuzzVerify(f *testing.F) { + const settings = "$y$j9T$AAt9R641xPvCI9nXw1HHW/" + + for i := 0; i < 100; i++ { + v, err := rand.Int(rand.Reader, big.NewInt(64)) + assert.NoError(f, err) + + password := make([]byte, v.Int64()) + _, err = rand.Read(password) + assert.NoError(f, err) + + hash, err := yescrypt.Hash(password, []byte(settings)) + assert.NoError(f, err) + + f.Add(string(password), string(hash)) + } + + f.Fuzz(func(_ *testing.T, a string, b string) { + assert.True(f, Verify(a, b)) + }) +} + +func TestVeirfy(t *testing.T) { + cases := []struct { + description string + password string + hash string + expected bool + }{ + { + description: "invalid password", + password: "invalid", + hash: "$y$j9T$AAt9R641xPvCI9nXw1HHW/$nCv3bckjDEC9Q5ahIEpyXVNGZhySye/ZdjxNxTY5ttB", + expected: false, + }, + { + description: "invalid hash", + password: "password", + hash: "invalid", + expected: false, + }, + { + description: "valid", + password: "password", + hash: "$y$j9T$AAt9R641xPvCI9nXw1HHW/$nCv3bckjDEC9Q5ahIEpyXVNGZhySye/ZdjxNxTY5ttB", + expected: true, + }, + } + + for _, test := range cases { + t.Run(test.description, func(tt *testing.T) { + result := Verify(test.password, test.hash) + + assert.Equal(tt, test.expected, result) + }) + } +} diff --git a/agent/refresh.yml b/agent/refresh.yml deleted file mode 100644 index 6bdede836a2..00000000000 --- 
a/agent/refresh.yml +++ /dev/null @@ -1,14 +0,0 @@ -app_root: /go/src/github.com/shellhub-io/shellhub -ignored_folders: -- vendor -included_extensions: -- .go -build_target_path: "" -build_path: /go/src/github.com/shellhub-io/shellhub/agent -build_flags: ["-tags", "docker", "-ldflags", "-X main.AgentVersion=latest"] -build_delay: 200ns -binary_name: agent -command_flags: [] -command_env: [] -enable_colors: true -log_name: "" diff --git a/pkg/agent/server/authentication.go b/agent/server/authentication.go similarity index 100% rename from pkg/agent/server/authentication.go rename to agent/server/authentication.go diff --git a/agent/server/modes/connector/authenticator.go b/agent/server/modes/connector/authenticator.go new file mode 100644 index 00000000000..62853a21dc5 --- /dev/null +++ b/agent/server/modes/connector/authenticator.go @@ -0,0 +1,290 @@ +package connector + +import ( + "archive/tar" + "context" + "crypto" + "crypto/rsa" + "crypto/sha256" + "encoding/base64" + "encoding/json" + "io" + + dockerclient "github.com/docker/docker/client" + gliderssh "github.com/gliderlabs/ssh" + "github.com/shellhub-io/shellhub/agent/pkg/osauth" + "github.com/shellhub-io/shellhub/agent/server/modes" + "github.com/shellhub-io/shellhub/pkg/api/client" + "github.com/shellhub-io/shellhub/pkg/models" + log "github.com/sirupsen/logrus" + gossh "golang.org/x/crypto/ssh" +) + +// NOTICE: Ensures the Authenticator interface is implemented. +var _ modes.Authenticator = (*Authenticator)(nil) + +// Authenticator implements the Authenticator interface when the server is running in connector mode. +type Authenticator struct { + // api is a client to communicate with the ShellHub's API. + api client.Client + // authData is the authentication data received from the API to authenticate the device. + authData *models.DeviceAuthResponse + // container is the device name. + // + // NOTICE: Uses a pointer for later assignment. 
+ container *string + // docker is a client to communicate with the Docker's API. + docker dockerclient.APIClient +} + +// NewAuthenticator creates a new instance of Authenticator for the connector mode. +func NewAuthenticator(api client.Client, docker dockerclient.APIClient, authData *models.DeviceAuthResponse, container *string) *Authenticator { + return &Authenticator{ + api: api, + authData: authData, + container: container, + docker: docker, + } +} + +// getPasswd return a [io.Reader] for the container's passwd file. +func getPasswd(ctx context.Context, cli dockerclient.APIClient, container string) (io.Reader, error) { + passwdTar, _, err := cli.CopyFromContainer(ctx, container, "/etc/passwd") + if err != nil { + return nil, err + } + + passwd := tar.NewReader(passwdTar) + if _, err := passwd.Next(); err != nil { + return nil, err + } + + return passwd, nil +} + +// getShadow return a [io.Reader] for the container's shadow file. +func getShadow(ctx context.Context, cli dockerclient.APIClient, container string) (io.Reader, error) { + shadowTar, _, err := cli.CopyFromContainer(ctx, container, "/etc/shadow") + if err != nil { + return nil, err + } + + shadow := tar.NewReader(shadowTar) + if _, err := shadow.Next(); err != nil { + return nil, err + } + + return shadow, nil +} + +// Password handles the server's SSH password authentication when server is running in connector mode. 
+func (a *Authenticator) Password(ctx gliderssh.Context, username string, password string) bool { + passwd, err := getPasswd(ctx, a.docker, *a.container) + if err != nil { + log.WithFields( + log.Fields{ + "container": *a.container, + "username": username, + }, + ).WithError(err).Error("failed to get the passwd file from container") + + return false + } + + user, err := osauth.LookupUserFromPasswd(username, passwd) + if err != nil { + log.WithFields( + log.Fields{ + "container": *a.container, + "username": username, + }, + ).WithError(err).Error("failed to lookup for the user on passwd file") + + return false + } + + if user.Password == "" { + log.WithFields( + log.Fields{ + "container": *a.container, + "username": username, + }, + ).WithError(err).Error("user passwd is empty, so the authentication via password is blocked") + + // NOTICE(r): when the user doesn't have password, we block the login. + return false + } + + shadow, err := getShadow(ctx, a.docker, *a.container) + if err != nil { + log.WithFields( + log.Fields{ + "container": *a.container, + "username": username, + }, + ).WithError(err).Error("failed to get the shadow file from container") + + return false + } + + if !osauth.AuthUserFromShadow(username, password, shadow) { + log.WithFields( + log.Fields{ + "container": *a.container, + "username": username, + }, + ).WithError(err).Error("failed to authenticate the user on the device") + + return false + } + + // NOTICE: set the osauth.User to the context to be obtained later on. + ctx.SetValue("user", user) + + log.WithFields( + log.Fields{ + "container": *a.container, + "username": username, + }, + ).Info("using password authentication") + + return true +} + +// PublicKey handles the server's SSH public key authentication when server is running in connector mode. 
+func (a *Authenticator) PublicKey(ctx gliderssh.Context, username string, key gliderssh.PublicKey) bool {
+	passwd, err := getPasswd(ctx, a.docker, *a.container)
+	if err != nil {
+		log.WithFields(
+			log.Fields{
+				"container": *a.container,
+				"username":  username,
+			},
+		).WithError(err).Error("failed to get the passwd file from container")
+
+		return false
+	}
+
+	user, err := osauth.LookupUserFromPasswd(username, passwd)
+	if err != nil {
+		log.WithFields(
+			log.Fields{
+				"container": *a.container,
+				"username":  username,
+			},
+		).WithError(err).Error("failed to lookup for the user on passwd file")
+
+		return false
+	}
+
+	// Signature is the payload whose server-side signature is verified against the user's public key.
+	type Signature struct {
+		Username  string
+		Namespace string
+	}
+
+	sig := &Signature{
+		Username:  username,
+		Namespace: *a.container,
+	}
+
+	sigBytes, err := json.Marshal(sig)
+	if err != nil {
+		log.WithFields(
+			log.Fields{
+				"container": *a.container,
+				"username":  username,
+			},
+		).WithError(err).Error("failed to marshal signature")
+
+		return false
+	}
+
+	sigHash := sha256.Sum256(sigBytes)
+
+	fingerprint := gossh.FingerprintLegacyMD5(key)
+	res, err := a.api.AuthPublicKey(&models.PublicKeyAuthRequest{
+		Fingerprint: fingerprint,
+		Data:        string(sigBytes),
+	}, a.authData.Token)
+	if err != nil {
+		log.WithFields(
+			log.Fields{
+				"container":   *a.container,
+				"username":    username,
+				"fingerprint": fingerprint,
+			},
+		).WithError(err).Error("failed to authenticate the user via public key")
+
+		return false
+	}
+
+	digest, err := base64.StdEncoding.DecodeString(res.Signature)
+	if err != nil {
+		// NOTE(review): a duplicated nested `if err != nil` block was removed here; one check suffices.
+		log.WithFields(
+			log.Fields{
+				"container":   *a.container,
+				"username":    username,
+				"fingerprint": fingerprint,
+			},
+		).WithError(err).Error("failed to decode the signature")
+
+		return false
+	}
+
+	cryptoKey, ok := key.(gossh.CryptoPublicKey)
+	if !ok {
+		log.WithFields(
+			log.Fields{
+				"container":   *a.container,
+				"username":    username,
+				"fingerprint": fingerprint,
+			},
+		).Error("failed to get the crypto public key")
+
+		return false
+	}
+
+	pubCrypto := cryptoKey.CryptoPublicKey()
+
+	pubKey, ok := pubCrypto.(*rsa.PublicKey)
+	if !ok {
+		log.WithFields(
+			log.Fields{
+				"container":   *a.container,
+				"username":    username,
+				"fingerprint": fingerprint,
+			},
+		).Error("failed to convert the crypto public key")
+
+		return false
+	}
+
+	if err = rsa.VerifyPKCS1v15(pubKey, crypto.SHA256, sigHash[:], digest); err != nil {
+		log.WithFields(
+			log.Fields{
+				"container":   *a.container,
+				"username":    username,
+				"fingerprint": fingerprint,
+			},
+		).WithError(err).Error("failed to verify the signature")
+
+		return false
+	}
+
+	// NOTICE: set the osauth.User to the context to be obtained later on.
+	ctx.SetValue("user", user)
+
+	log.WithFields(
+		log.Fields{
+			"container":   *a.container,
+			"username":    username,
+			"fingerprint": fingerprint,
+		},
+	).Info("using public key authentication")
+
+	return true
+}
diff --git a/agent/server/modes/connector/connector.go b/agent/server/modes/connector/connector.go
new file mode 100644
index 00000000000..67f219be6a6
--- /dev/null
+++ b/agent/server/modes/connector/connector.go
@@ -0,0 +1,94 @@
+// Package connector defines methods for authentication and session handling for SSH when it is running in connector mode.
+//
+// Connector mode means that the SSH server runs on the host machine, but redirects the IO to a specific docker
+// container, managing its authentication through the container's "/etc/passwd", "/etc/shadow" and so on.
+package connector + +import ( + "context" + + "github.com/docker/docker/api/types" + dockercontainer "github.com/docker/docker/api/types/container" + dockerclient "github.com/docker/docker/client" + "github.com/docker/docker/pkg/process" + "github.com/shellhub-io/shellhub/agent/pkg/osauth" +) + +type Mode struct { + Authenticator + Sessioner +} + +func attachShellToContainer(ctx context.Context, cli dockerclient.APIClient, container string, user *osauth.User, size [2]int) (*types.HijackedResponse, string, error) { + return attachToContainer(ctx, cli, "shell", container, user, true, []string{}, size) +} + +func attachExecToContainer(ctx context.Context, cli dockerclient.APIClient, container string, user *osauth.User, isPty bool, commands []string, size [2]int) (*types.HijackedResponse, string, error) { + return attachToContainer(ctx, cli, "exec", container, user, isPty, commands, size) +} + +func attachHereDocToContainer(ctx context.Context, cli dockerclient.APIClient, container string, user *osauth.User, size [2]int) (*types.HijackedResponse, string, error) { + return attachToContainer(ctx, cli, "heredoc", container, user, false, []string{}, size) +} + +func attachToContainer(ctx context.Context, cli dockerclient.APIClient, requestType string, container string, user *osauth.User, isPty bool, commands []string, size [2]int) (*types.HijackedResponse, string, error) { + if user.Shell == "" { + user.Shell = "/bin/sh" + } + + s := &[2]uint{ + uint(size[0]), //nolint:gosec + uint(size[1]), //nolint:gosec + } + + id, err := cli.ContainerExecCreate(ctx, container, dockercontainer.ExecOptions{ + User: user.Username, + Tty: isPty, + ConsoleSize: s, + AttachStdin: true, + AttachStdout: true, + AttachStderr: true, + Cmd: func() []string { + switch requestType { + case "shell": + return []string{user.Shell} + case "exec": + // NOTE(r): when the exec session's has `-t` or `-tt` flag, the command must be executed into a tty/pty. + // the Shell's `-c` flag is used to do this. 
+ if isPty { + return append([]string{user.Shell, "-c"}, commands...) + } + + return commands + case "heredoc": + return []string{user.Shell} + default: + return []string{} + } + }(), + }) + if err != nil { + return nil, "", err + } + + res, err := cli.ContainerExecAttach(ctx, id.ID, dockercontainer.ExecStartOptions{ + Tty: isPty, + ConsoleSize: s, + }) + + return &res, id.ID, err +} + +func exitCodeExecFromContainer(cli dockerclient.APIClient, id string) (int, error) { + inspected, err := cli.ContainerExecInspect(context.Background(), id) + if err != nil { + return -1, err + } + + if inspected.Running { + // NOTICE: when a process is running after the exec command, it is necessary to kill it. + return 0, process.Kill(inspected.Pid) + } + + return inspected.ExitCode, nil +} diff --git a/agent/server/modes/connector/sessioner.go b/agent/server/modes/connector/sessioner.go new file mode 100644 index 00000000000..67ad8145395 --- /dev/null +++ b/agent/server/modes/connector/sessioner.go @@ -0,0 +1,212 @@ +package connector + +import ( + "errors" + "fmt" + "io" + "sync" + + dockerclient "github.com/docker/docker/client" + "github.com/docker/docker/pkg/stdcopy" + gliderssh "github.com/gliderlabs/ssh" + "github.com/shellhub-io/shellhub/agent/pkg/osauth" + "github.com/shellhub-io/shellhub/agent/server/modes" +) + +var ErrUserNotFound = errors.New("user not found on context") + +// NOTICE: Ensures the Sessioner interface is implemented. +var _ modes.Sessioner = (*Sessioner)(nil) + +// Sessioner implements the Sessioner interface when the server is running in connector mode. +type Sessioner struct { + // container is the device name. + // + // NOTICE: It's a pointer because when the server is created, we don't know the device name yet, that is set later. + container *string + docker dockerclient.APIClient +} + +// NewSessioner creates a new instance of Sessioner for the connector mode. 
+// The container is a pointer to a string because when the server is created, we don't know the device name yet, that +// is set later. +func NewSessioner(container *string, docker dockerclient.APIClient) *Sessioner { + return &Sessioner{ + container: container, + docker: docker, + } +} + +// Shell handles the server's SSH shell session when server is running in connector mode. +func (s *Sessioner) Shell(session gliderssh.Session) error { + sspty, _, _ := session.Pty() + + // NOTICE(r): To identify what the container the connector should connect to, we use the `deviceName` as the container name + container := *s.container + + user, ok := session.Context().Value("user").(*osauth.User) + if !ok { + return ErrUserNotFound + } + + resp, id, err := attachShellToContainer(session.Context(), s.docker, container, user, [2]int{sspty.Window.Height, sspty.Window.Width}) + if err != nil { + return err + } + defer resp.Close() + + var wg sync.WaitGroup + wg.Add(1) + go func() { + defer wg.Done() + defer func() { + code, err := exitCodeExecFromContainer(s.docker, id) + if err != nil { + fmt.Println(err) + } + + session.Exit(code) //nolint:errcheck + }() + + if _, err := io.Copy(session, resp.Conn); err != nil && err != io.EOF { + fmt.Println(err) + } + }() + + wg.Add(1) + go func() { + defer wg.Done() + defer resp.Close() + + if _, err := io.Copy(resp.Conn, session); err != nil && err != io.EOF { + fmt.Println(err) + } + }() + + wg.Wait() + + return nil +} + +// Exec handles the SSH's server exec session when server is running in connector mode. 
+func (s *Sessioner) Exec(session gliderssh.Session) error {
+	sspty, _, isPty := session.Pty()
+
+	// NOTICE(r): To identify which container the connector should connect to, we use the `deviceName` as the container name.
+	container := *s.container
+
+	// The osauth.User was stored on the context by the Authenticator during the auth phase.
+	user, ok := session.Context().Value("user").(*osauth.User)
+	if !ok {
+		return ErrUserNotFound
+	}
+
+	resp, id, err := attachExecToContainer(session.Context(), s.docker, container, user, isPty, session.Command(), [2]int{sspty.Window.Height, sspty.Window.Width})
+	if err != nil {
+		return err
+	}
+	defer resp.Close()
+
+	var wg sync.WaitGroup
+	wg.Add(1)
+	go func() {
+		defer wg.Done()
+		defer func() {
+			// Propagate the container command's exit code to the SSH client once output is drained.
+			code, err := exitCodeExecFromContainer(s.docker, id)
+			if err != nil {
+				fmt.Println(err)
+			}
+
+			session.Exit(code) //nolint:errcheck
+		}()
+
+		// NOTICE: According to the [Docker] documentation, we can "demultiplex" a command sent to container, but only
+		// when the exec started doesn't allocate a TTY. As a result, we check if the exec's is requesting it and do
+		// what was recommended by [Docker]'s to get the stdout and stderr separately.
+		//
+		// [Docker]: https://pkg.go.dev/github.com/docker/docker/client#Client.ContainerAttach
+		if isPty {
+			if _, err := io.Copy(session, resp.Reader); err != nil && err != io.EOF {
+				fmt.Println(err)
+			}
+		} else {
+			if _, err := stdcopy.StdCopy(session, session.Stderr(), resp.Reader); err != nil && err != io.EOF {
+				fmt.Println(err)
+			}
+		}
+	}()
+
+	wg.Add(1)
+	go func() {
+		defer wg.Done()
+		// Half-close the write side so the container sees EOF on stdin while its output can still be read.
+		defer resp.CloseWrite() //nolint:errcheck
+
+		if _, err := io.Copy(resp.Conn, session); err != nil && err != io.EOF {
+			fmt.Println(err)
+		}
+	}()
+
+	wg.Wait()
+
+	return nil
+}
+
+// Heredoc handles the server's SSH heredoc session when the server is running in connector mode.
+//
+// heredoc is a special block of code that contains multi-line strings that will be redirected to the stdin of a shell.
+// It requests a shell, but doesn't allocate a pty.
+func (s *Sessioner) Heredoc(session gliderssh.Session) error {
+	sspty, _, _ := session.Pty()
+
+	// NOTICE(r): To identify which container the connector should connect to, we use the `deviceName` as the container name.
+	container := *s.container
+
+	// The osauth.User was stored on the context by the Authenticator during the auth phase.
+	user, ok := session.Context().Value("user").(*osauth.User)
+	if !ok {
+		return ErrUserNotFound
+	}
+
+	resp, id, err := attachHereDocToContainer(session.Context(), s.docker, container, user, [2]int{sspty.Window.Height, sspty.Window.Width})
+	if err != nil {
+		return err
+	}
+	defer resp.Close()
+
+	var wg sync.WaitGroup
+	wg.Add(1)
+	go func() {
+		defer wg.Done()
+		defer func() {
+			// Propagate the container shell's exit code to the SSH client once output is drained.
+			code, err := exitCodeExecFromContainer(s.docker, id)
+			if err != nil {
+				fmt.Println(err)
+			}
+
+			session.Exit(code) //nolint:errcheck
+		}()
+
+		// No TTY is allocated for heredoc, so stdout/stderr arrive multiplexed and must be demultiplexed.
+		if _, err := stdcopy.StdCopy(session, session.Stderr(), resp.Reader); err != nil && err != io.EOF {
+			fmt.Println(err)
+		}
+	}()
+
+	wg.Add(1)
+	go func() {
+		defer wg.Done()
+		// Half-close the write side so the shell sees EOF on stdin after the heredoc is sent.
+		defer resp.CloseWrite() //nolint:errcheck
+
+		if _, err := io.Copy(resp.Conn, session); err != nil && err != io.EOF {
+			fmt.Println(err)
+		}
+	}()
+
+	wg.Wait()
+
+	return nil
+}
+
+// SFTP handles the SSH server's sftp session when the server is running in connector mode.
+//
+// sftp is a subsystem of SSH that allows file operations over SSH.
+func (s *Sessioner) SFTP(_ gliderssh.Session) error {
+	return errors.New("SFTP isn't supported to ShellHub Agent in connector mode")
+}
diff --git a/agent/server/modes/host/authenticator.go b/agent/server/modes/host/authenticator.go
new file mode 100644
index 00000000000..d75f1814b5b
--- /dev/null
+++ b/agent/server/modes/host/authenticator.go
@@ -0,0 +1,188 @@
+package host
+
+import (
+	"crypto"
+	"crypto/rsa"
+	"crypto/sha256"
+	"encoding/base64"
+	"encoding/json"
+
+	gliderssh "github.com/gliderlabs/ssh"
+	"github.com/shellhub-io/shellhub/agent/pkg/osauth"
+	"github.com/shellhub-io/shellhub/agent/server/modes"
+	"github.com/shellhub-io/shellhub/pkg/api/client"
+	"github.com/shellhub-io/shellhub/pkg/models"
+	log "github.com/sirupsen/logrus"
+	gossh "golang.org/x/crypto/ssh"
+)
+
+// NOTICE: Ensures the Authenticator interface is implemented.
+var _ modes.Authenticator = (*Authenticator)(nil)
+
+// Authenticator implements the Authenticator interface when the server is running in host mode.
+type Authenticator struct {
+	// api is a client to communicate with the ShellHub's API.
+	api client.Client
+	// authData is the authentication data received from the API to authenticate the device.
+	authData *models.DeviceAuthResponse
+	// singleUserPassword is the password of the single user.
+	// When it is empty, it means that the single user is disabled.
+	singleUserPassword string
+	// deviceName is the device name.
+	//
+	// NOTICE: Uses a pointer for later assignment.
+	deviceName *string
+}
+
+// NewAuthenticator creates a new instance of Authenticator for the host mode.
+// It receives the api client to perform requests to the ShellHub's API, the authentication data received by the agent
+// when it started the communication with the server, the singleUserPassword, which indicates it is running in
+// this mode, and the deviceName.
+//
+// The deviceName is a pointer to a string because when the server is created, we don't know the device name yet; it
+// is set later.
+func NewAuthenticator(api client.Client, authData *models.DeviceAuthResponse, singleUserPassword string, deviceName *string) *Authenticator {
+	return &Authenticator{
+		api:                api,
+		authData:           authData,
+		singleUserPassword: singleUserPassword,
+		deviceName:         deviceName,
+	}
+}
+
+// Password handles the server's SSH password authentication when the server is running in host mode.
+func (a *Authenticator) Password(ctx gliderssh.Context, _ string, pass string) bool {
+	// NOTE: the local `log` deliberately shadows the package-level logrus import to carry the user field.
+	log := log.WithFields(log.Fields{
+		"user": ctx.User(),
+	})
+	var ok bool
+
+	// When singleUserPassword is empty, single-user mode is disabled and the OS user database is used;
+	// otherwise, the supplied password is checked against the configured single-user hash.
+	if a.singleUserPassword == "" {
+		ok = osauth.AuthUser(ctx.User(), pass)
+	} else {
+		ok = osauth.VerifyPasswordHash(a.singleUserPassword, pass)
+	}
+
+	if ok {
+		log.Info("Using password authentication")
+	} else {
+		log.Info("Failed to authenticate using password")
+	}
+
+	return ok
+}
+
+// PublicKey handles the server's SSH public key authentication when the server is running in host mode.
+func (a *Authenticator) PublicKey(ctx gliderssh.Context, _ string, key gliderssh.PublicKey) bool { + if _, err := osauth.LookupUser(ctx.User()); err != nil { + return false + } + + if key == nil { + return false + } + + type Signature struct { + Username string + Namespace string + } + + sig := &Signature{ + Username: ctx.User(), + Namespace: *a.deviceName, + } + + sigBytes, err := json.Marshal(sig) + if err != nil { + log.WithFields( + log.Fields{ + "container": *a.deviceName, + "username": ctx.User(), + }, + ).WithError(err).Error("failed to marshal signature") + + return false + } + + sigHash := sha256.Sum256(sigBytes) + + fingerprint := gossh.FingerprintLegacyMD5(key) + res, err := a.api.AuthPublicKey(&models.PublicKeyAuthRequest{ + Fingerprint: fingerprint, + Data: string(sigBytes), + }, a.authData.Token) + if err != nil { + log.WithFields( + log.Fields{ + "container": *a.deviceName, + "username": ctx.User(), + "fingerprint": fingerprint, + }, + ).WithError(err).Error("failed to authenticate the user via public key") + + return false + } + + digest, err := base64.StdEncoding.DecodeString(res.Signature) + if err != nil { + log.WithFields( + log.Fields{ + "container": *a.deviceName, + "username": ctx.User(), + "fingerprint": fingerprint, + }, + ).WithError(err).Error("failed to decode the signature") + + return false + } + + cryptoKey, ok := key.(gossh.CryptoPublicKey) + if !ok { + log.WithFields( + log.Fields{ + "container": *a.deviceName, + "username": ctx.User(), + "fingerprint": fingerprint, + }, + ).Error("failed to get the crypto public key") + + return false + } + + pubCrypto := cryptoKey.CryptoPublicKey() + + pubKey, ok := pubCrypto.(*rsa.PublicKey) + if !ok { + log.WithFields( + log.Fields{ + "container": *a.deviceName, + "username": ctx.User(), + "fingerprint": fingerprint, + }, + ).Error("failed to convert the crypto public key") + + return false + } + + if err = rsa.VerifyPKCS1v15(pubKey, crypto.SHA256, sigHash[:], digest); err != nil { + 
log.WithFields( + log.Fields{ + "container": *a.deviceName, + "username": ctx.User(), + "fingerprint": fingerprint, + }, + ).WithError(err).Error("failed to verify the signature") + + return false + } + + log.WithFields( + log.Fields{ + "container": *a.deviceName, + "username": ctx.User(), + "fingerprint": fingerprint, + }, + ).Info("using public key authentication") + + return true +} diff --git a/agent/server/modes/host/authenticator_test.go b/agent/server/modes/host/authenticator_test.go new file mode 100644 index 00000000000..6f2eac592be --- /dev/null +++ b/agent/server/modes/host/authenticator_test.go @@ -0,0 +1,291 @@ +package host + +import ( + "crypto" + "crypto/rand" + "crypto/rsa" + "crypto/sha256" + "encoding/base64" + "encoding/json" + "errors" + "testing" + + gliderssh "github.com/gliderlabs/ssh" + "github.com/go-playground/assert/v2" + "github.com/shellhub-io/shellhub/agent/pkg/osauth" + osauthMocks "github.com/shellhub-io/shellhub/agent/pkg/osauth/mocks" + clientMocks "github.com/shellhub-io/shellhub/pkg/api/client/mocks" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/stretchr/testify/mock" + gossh "golang.org/x/crypto/ssh" +) + +func TestPublicKey(t *testing.T) { + // stringToRef is a helper function to convert a string to a pointer to a string. 
+ stringToRef := func(s string) *string { return &s } + + privKey, _ := rsa.GenerateKey(rand.Reader, 2048) + key, _ := gossh.NewPublicKey(&privKey.PublicKey) + + osauthMock := &osauthMocks.Backend{} + osauth.DefaultBackend = osauthMock + + tests := []struct { + ctx gliderssh.Context + authenticator *Authenticator + name string + user string + key gliderssh.PublicKey + requiredMocs func(apiMock *clientMocks.Client, osauthMock *osauthMocks.Backend) + expected bool + }{ + { + ctx: &testSSHContext{ + user: "", + }, + authenticator: &Authenticator{ + deviceName: stringToRef("device"), + api: new(clientMocks.Client), + }, + name: "return false when user is not found", + user: "", + key: nil, + requiredMocs: func(_ *clientMocks.Client, osauthMock *osauthMocks.Backend) { + osauthMock.On("LookupUser", "").Return(nil, osauth.ErrUserNotFound).Once() + }, + expected: false, + }, + { + ctx: &testSSHContext{ + user: "test", + }, + authenticator: &Authenticator{ + authData: &models.DeviceAuthResponse{ + Token: "token", + }, + singleUserPassword: "", + deviceName: stringToRef("device"), + api: new(clientMocks.Client), + }, + name: "return false when public key api request fails", + user: "", + key: key, + requiredMocs: func(apiMock *clientMocks.Client, osauthMock *osauthMocks.Backend) { + osauthMock.On("LookupUser", "test").Return(&osauth.User{Username: "test"}, nil).Once() + apiMock.On("AuthPublicKey", mock.Anything, "token").Return(nil, errors.New("error")).Once() + }, + expected: false, + }, + { + ctx: &testSSHContext{ + user: "test", + }, + authenticator: &Authenticator{ + authData: &models.DeviceAuthResponse{ + Token: "token", + }, + singleUserPassword: "", + deviceName: stringToRef("device"), + api: new(clientMocks.Client), + }, + name: "return false when public key signature is invalid", + user: "", + key: key, + requiredMocs: func(apiMock *clientMocks.Client, osauthMock *osauthMocks.Backend) { + osauthMock.On("LookupUser", "test").Return(&osauth.User{Username: "test"}, 
nil).Once() + apiMock.On("AuthPublicKey", mock.Anything, "token").Return(&models.PublicKeyAuthResponse{ + Signature: "signature", + }, nil).Once() + }, + expected: false, + }, + { + ctx: &testSSHContext{ + user: "test", + }, + authenticator: &Authenticator{ + authData: &models.DeviceAuthResponse{ + Token: "token", + }, + singleUserPassword: "", + deviceName: stringToRef("device"), + api: new(clientMocks.Client), + }, + name: "return true when public key signature does not implement crypto.PublicKey", + user: "", + key: key, + requiredMocs: func(apiMock *clientMocks.Client, osauthMock *osauthMocks.Backend) { + osauthMock.On("LookupUser", "test").Return(&osauth.User{Username: "test"}, nil).Once() + apiMock.On("AuthPublicKey", mock.Anything, "token").Return(&models.PublicKeyAuthResponse{ + Signature: base64.StdEncoding.EncodeToString([]byte("signature")), + }, nil).Once() + }, + expected: false, + }, + { + ctx: &testSSHContext{ + user: "test", + }, + authenticator: &Authenticator{ + authData: &models.DeviceAuthResponse{ + Token: "token", + }, + singleUserPassword: "", + deviceName: stringToRef("device"), + api: new(clientMocks.Client), + }, + name: "fail when public key returned by crypto.PublicKey is not a pointer to a rsa.PublicKey", + user: "", + key: key, + requiredMocs: func(apiMock *clientMocks.Client, osauthMock *osauthMocks.Backend) { + osauthMock.On("LookupUser", "test").Return(&osauth.User{Username: "test"}, nil).Once() + apiMock.On("AuthPublicKey", mock.Anything, "token").Return(&models.PublicKeyAuthResponse{ + Signature: base64.StdEncoding.EncodeToString([]byte("signature")), + }, nil).Once() + }, + expected: false, + }, + { + ctx: &testSSHContext{ + user: "test", + }, + authenticator: &Authenticator{ + authData: &models.DeviceAuthResponse{ + Token: "token", + }, + singleUserPassword: "", + deviceName: stringToRef("device"), + api: new(clientMocks.Client), + }, + name: "return false when public key returned by crypto.PublicKey does not pass on 
rsa.VerifyPKCS1v15", + user: "", + key: key, + requiredMocs: func(apiMock *clientMocks.Client, osauthMock *osauthMocks.Backend) { + osauthMock.On("LookupUser", "test").Return(&osauth.User{Username: "test"}, nil).Once() + apiMock.On("AuthPublicKey", mock.Anything, "token").Return(&models.PublicKeyAuthResponse{ + Signature: base64.StdEncoding.EncodeToString([]byte("signature")), + }, nil).Once() + }, + expected: false, + }, + { + ctx: &testSSHContext{ + user: "test", + }, + authenticator: &Authenticator{ + authData: &models.DeviceAuthResponse{ + Token: "token", + }, + singleUserPassword: "", + deviceName: stringToRef("device"), + api: new(clientMocks.Client), + }, + name: "return true when public key signature is valid", + user: "", + key: key, + requiredMocs: func(apiMock *clientMocks.Client, osauthMock *osauthMocks.Backend) { + osauthMock.On("LookupUser", "test").Return(&osauth.User{Username: "test"}, nil).Once() + + type Signature struct { + Username string + Namespace string + } + + sigBytes, _ := json.Marshal(&Signature{ + Username: "test", + Namespace: "device", + }) + + digest := sha256.Sum256(sigBytes) + + signature, _ := rsa.SignPKCS1v15(rand.Reader, privKey, crypto.SHA256, digest[:]) + + apiMock.On("AuthPublicKey", &models.PublicKeyAuthRequest{ + Fingerprint: gossh.FingerprintLegacyMD5(key), + Data: string(sigBytes), + }, "token").Return(&models.PublicKeyAuthResponse{ + Signature: base64.StdEncoding.EncodeToString(signature), + }, nil).Once() + }, + expected: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tt.requiredMocs(tt.authenticator.api.(*clientMocks.Client), osauthMock) + + ok := tt.authenticator.PublicKey(tt.ctx, tt.user, tt.key) + assert.Equal(t, tt.expected, ok) + }) + } +} + +func TestPassword(t *testing.T) { + mock := &osauthMocks.Backend{} + osauth.DefaultBackend = mock + + tests := []struct { + ctx gliderssh.Context + authenticator *Authenticator + name string + user string + password string + 
requiredMocks func() + expected bool + }{ + { + ctx: &testSSHContext{user: "test"}, + authenticator: &Authenticator{}, + name: "return false when user or password are invalid", + user: "", + password: "password", + requiredMocks: func() { + mock.On("AuthUser", "test", "password").Return(false).Once() + }, + expected: false, + }, + { + ctx: &testSSHContext{user: "test"}, + authenticator: &Authenticator{}, + name: "return true when user and password are valid", + user: "", + password: "test", + requiredMocks: func() { + mock.On("AuthUser", "test", "test").Return(true).Once() + }, + expected: true, + }, + { + ctx: &testSSHContext{user: "test"}, + authenticator: &Authenticator{ + singleUserPassword: "test", + }, + name: "return false when single user is enabled and password is invalid", + user: "", + password: "password", + requiredMocks: func() {}, + expected: false, + }, + { + ctx: &testSSHContext{user: "test"}, + authenticator: &Authenticator{ + singleUserPassword: "$6$Ntq5PynhGPFJuhxn$emiTnyA.GTsvK6JjjrecwDSB3jywkoHky9ZuJAYwSGFlZU2npTFOEMVPYG7CsDLRyvUE7OzbqFidYuKO274DC.", + }, + name: "return true when single user is enabled and password is valid", + user: "", + password: "test", + requiredMocks: func() {}, + expected: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tt.requiredMocks() + + got := tt.authenticator.Password(tt.ctx, tt.user, tt.password) + assert.Equal(t, tt.expected, got) + }) + } +} diff --git a/agent/server/modes/host/command/command.go b/agent/server/modes/host/command/command.go new file mode 100644 index 00000000000..16d92e965b8 --- /dev/null +++ b/agent/server/modes/host/command/command.go @@ -0,0 +1,8 @@ +package command + +type SFTPServerMode string + +const ( + SFTPServerModeNative SFTPServerMode = "native" + SFTPServerModeDocker SFTPServerMode = "docker" +) diff --git a/agent/server/modes/host/command/command_docker.go b/agent/server/modes/host/command/command_docker.go new file mode 100644 index 
00000000000..5e98375215c --- /dev/null +++ b/agent/server/modes/host/command/command_docker.go @@ -0,0 +1,113 @@ +//go:build docker +// +build docker + +package command + +import ( + "fmt" + "os" + "os/exec" + "strconv" + "strings" + + "github.com/shellhub-io/shellhub/agent/pkg/osauth" +) + +func NewCmd(u *osauth.User, shell, term, host string, envs []string, command ...string) *exec.Cmd { + groups, err := osauth.ListGroups(u.Username) + if err != nil { + groups = []uint32{} + } + + // NOTE: Wrap the command with nsenter and setpriv to run it inside the + // host's namespaces with the correct user and groups. This is necessary + // because the agent is running inside a Docker container and we want to + // execute the command in the host's context. + nscommand, _ := nsenterCommandWrapper(u.UID, u.GID, groups, u.HomeDir, command...) + + cmd := exec.Command(nscommand[0], nscommand[1:]...) //nolint:gosec + // TODO: There are other environment variables we could set like SSH_CONNECTION, SSH_TTY, SSH_ORIGINAL_COMMAND, etc. + // We need to check which ones are relevant and set them accordingly. + // https://en.wikibooks.org/wiki/OpenSSH/Client_Applications + cmd.Env = []string{ + "TERM=" + term, + "HOME=" + u.HomeDir, + "SHELL=" + shell, + "USER=" + u.Username, + "LOGNAME=" + u.Username, + "SHELLHUB_HOST=" + host, + // NOTE: We need to set the SSH_CLIENT because some applications (like bash) check for it to enable some + // features or load some files (like .bashrc). Currently, we don't have this information, so we set a fake one. + // TODO: Set the real SSH_CLIENT value. + // Format: " " + // https://en.wikibooks.org/wiki/OpenSSH/Client_Applications + "SSH_CLIENT=127.0.0.1 0 0", + } + cmd.Env = append(cmd.Env, envs...) 
+ + return cmd +} + +func getWrappedCommand(nsArgs []string, uid, gid uint32, groups []uint32, home string) []string { + gids := []string{} + for _, g := range groups { + gids = append(gids, strconv.Itoa(int(g))) + } + + setPrivCmd := []string{ + "/bin/setpriv", + fmt.Sprintf("--groups=%s", strings.Join(gids, ",")), + "--ruid", + strconv.Itoa(int(uid)), + "--regid", + strconv.Itoa(int(gid)), + } + + nsenterCmd := append([]string{ + "/usr/bin/nsenter", + "-t", + "1", + }, nsArgs...) + + nsenterCmd = append(nsenterCmd, + []string{ + "-S", + strconv.Itoa(int(uid)), + fmt.Sprintf("--wdns=%s", home), + }..., + ) + + return append(setPrivCmd, nsenterCmd...) +} + +func nsenterCommandWrapper(uid, gid uint32, groups []uint32, home string, command ...string) ([]string, error) { + if _, err := os.Stat("/usr/bin/nsenter"); err != nil && !os.IsNotExist(err) { + return nil, err + } + + paths := map[string]string{ + "mnt": "-m", + "uts": "-u", + "ipc": "-i", + "net": "-n", + "pid": "-p", + "cgroup": "-C", + "time": "-T", + } + + args := []string{} + for path, params := range paths { + if _, err := os.Stat(fmt.Sprintf("/proc/1/ns/%s", path)); err != nil { + continue + } + + args = append(args, params) + } + + return append(getWrappedCommand(args, uid, gid, groups, home), command...), nil +} + +// SFTPServerCommand creates the command used by agent to start the SFTP server used in a SFTP connection. +func SFTPServerCommand() *exec.Cmd { + return exec.Command("/proc/self/exe", []string{"sftp", string(SFTPServerModeDocker)}...) 
//nolint:gosec +} diff --git a/agent/server/modes/host/command/command_native.go b/agent/server/modes/host/command/command_native.go new file mode 100644 index 00000000000..80e7a7a01bd --- /dev/null +++ b/agent/server/modes/host/command/command_native.go @@ -0,0 +1,58 @@ +//go:build !docker +// +build !docker + +package command + +import ( + "os" + "os/exec" + "syscall" + + "github.com/shellhub-io/shellhub/agent/pkg/osauth" + log "github.com/sirupsen/logrus" +) + +func NewCmd(u *osauth.User, shell, term, host string, envs []string, command ...string) *exec.Cmd { + groups, err := osauth.ListGroups(u.Username) + if err != nil { + groups = []uint32{} + } + + cmd := exec.Command(command[0], command[1:]...) //nolint:gosec + // TODO: There are other environment variables we could set like SSH_CONNECTION, SSH_TTY, SSH_ORIGINAL_COMMAND, etc. + // We need to check which ones are relevant and set them accordingly. + // https://en.wikibooks.org/wiki/OpenSSH/Client_Applications + cmd.Env = []string{ + "TERM=" + term, + "HOME=" + u.HomeDir, + "SHELL=" + shell, + "SHELLHUB_HOST=" + host, + // NOTE: We need to set the SSH_CLIENT because some applications (like bash) check for it to enable some + // features or load some files (like .bashrc). Currently, we don't have this information, so we set a fake one. + // TODO: Set the real SSH_CLIENT value. + // Format: " " + // https://en.wikibooks.org/wiki/OpenSSH/Client_Applications + "SSH_CLIENT=127.0.0.1 0 0", + } + cmd.Env = append(cmd.Env, envs...) + + if _, err := os.Stat(u.HomeDir); err != nil { + log.WithError(err).WithField("dir", u.HomeDir).Warn("setting user's home directory to /") + + cmd.Dir = "/" + } else { + cmd.Dir = u.HomeDir + } + + if os.Geteuid() == 0 { + cmd.SysProcAttr = &syscall.SysProcAttr{} + cmd.SysProcAttr.Credential = &syscall.Credential{Uid: u.UID, Gid: u.GID, Groups: groups} + } + + return cmd +} + +// SFTPServerCommand creates the command used by agent to start the SFTP server used in a SFTP connection. 
+func SFTPServerCommand() *exec.Cmd { + return exec.Command("/proc/self/exe", []string{"sftp", string(SFTPServerModeNative)}...) //nolint:gosec +} diff --git a/pkg/agent/server/modes/host/host.go b/agent/server/modes/host/host.go similarity index 100% rename from pkg/agent/server/modes/host/host.go rename to agent/server/modes/host/host.go diff --git a/pkg/agent/server/modes/host/host_test.go b/agent/server/modes/host/host_test.go similarity index 100% rename from pkg/agent/server/modes/host/host_test.go rename to agent/server/modes/host/host_test.go diff --git a/pkg/agent/server/modes/host/pty.go b/agent/server/modes/host/pty.go similarity index 89% rename from pkg/agent/server/modes/host/pty.go rename to agent/server/modes/host/pty.go index 957fd908fb4..865db950080 100644 --- a/pkg/agent/server/modes/host/pty.go +++ b/agent/server/modes/host/pty.go @@ -52,7 +52,7 @@ func startPty(c *exec.Cmd, out io.ReadWriter, winCh <-chan glidderssh.Window) (* go func() { for win := range winCh { - _ = creackpty.Setsize(f, &creackpty.Winsize{uint16(win.Height), uint16(win.Width), 0, 0}) + _ = creackpty.Setsize(f, &creackpty.Winsize{Rows: uint16(win.Height), Cols: uint16(win.Width), X: 0, Y: 0}) //nolint:gosec } }() @@ -100,7 +100,7 @@ func initPty(c *exec.Cmd, sess io.ReadWriter, winCh <-chan glidderssh.Window) (* // listen for window size changes from the SSH client and update the PTY's dimensions. 
go func() { for win := range winCh { - _ = creackpty.Setsize(pty, &creackpty.Winsize{uint16(win.Height), uint16(win.Width), 0, 0}) + _ = creackpty.Setsize(pty, &creackpty.Winsize{Rows: uint16(win.Height), Cols: uint16(win.Width), X: 0, Y: 0}) //nolint:gosec } }() diff --git a/agent/server/modes/host/sessioner.go b/agent/server/modes/host/sessioner.go new file mode 100644 index 00000000000..53227cbd658 --- /dev/null +++ b/agent/server/modes/host/sessioner.go @@ -0,0 +1,438 @@ +package host + +import ( + "bufio" + "errors" + "fmt" + "io" + "os" + "os/exec" + "os/user" + "sync" + + gliderssh "github.com/gliderlabs/ssh" + "github.com/shellhub-io/shellhub/agent/pkg/osauth" + "github.com/shellhub-io/shellhub/agent/server/modes" + "github.com/shellhub-io/shellhub/agent/server/modes/host/command" + "github.com/shellhub-io/shellhub/agent/server/utmp" + log "github.com/sirupsen/logrus" + gossh "golang.org/x/crypto/ssh" +) + +// NOTICE: Ensures the Sessioner interface is implemented. +var _ modes.Sessioner = (*Sessioner)(nil) + +// Sessioner implements the Sessioner interface when the server is running in host mode. +type Sessioner struct { + mu sync.Mutex + cmds map[string]*exec.Cmd + // deviceName is the device name. + // + // NOTICE: It's a pointer because when the server is created, we don't know the device name yet, that is set later. + deviceName *string +} + +func (s *Sessioner) SetCmds(cmds map[string]*exec.Cmd) { + s.cmds = cmds +} + +// NewSessioner creates a new instance of Sessioner for the host mode. +// The device name is a pointer to a string because when the server is created, we don't know the device name yet, that +// is set later. +func NewSessioner(deviceName *string, cmds map[string]*exec.Cmd) *Sessioner { + return &Sessioner{ + deviceName: deviceName, + cmds: cmds, + } +} + +// Shell manages the SSH shell session of the server when operating in host mode. 
+func (s *Sessioner) Shell(session gliderssh.Session) error { + sspty, winCh, isPty := session.Pty() + + scmd := generateShellCmd(*s.deviceName, session, sspty.Term) + if scmd == nil { + return errors.New("failed to generate shell command") + } + + pts, err := startPty(scmd, session, winCh) + if err != nil { + log.Warn(err) + } + + u, err := osauth.LookupUser(session.User()) + if err != nil { + return err + } + + err = os.Chown(pts.Name(), int(u.UID), -1) + if err != nil { + log.Warn(err) + } + + remoteAddr := session.RemoteAddr() + + log.WithFields(log.Fields{ + "user": session.User(), + "pty": pts.Name(), + "ispty": isPty, + "remoteaddr": remoteAddr, + "localaddr": session.LocalAddr(), + }).Info("Session started") + + ut := utmp.UtmpStartSession( + pts.Name(), + session.User(), + remoteAddr.String(), + ) + + s.mu.Lock() + s.cmds[session.Context().Value(gliderssh.ContextKeySessionID).(string)] = scmd + s.mu.Unlock() + + if err := scmd.Wait(); err != nil { + log.Warn(err) + } + + log.WithFields(log.Fields{ + "user": session.User(), + "pty": pts.Name(), + "remoteaddr": remoteAddr, + "localaddr": session.LocalAddr(), + }).Info("Session ended") + + utmp.UtmpEndSession(ut) + + return nil +} + +// Heredoc handles the server's SSH heredoc session when server is running in host mode. +// +// heredoc is special block of code that contains multi-line strings that will be redirected to a stdin of a shell. It +// request a shell, but doesn't allocate a pty. 
+func (s *Sessioner) Heredoc(session gliderssh.Session) error { + _, _, isPty := session.Pty() + + cmd := generateShellCmd(*s.deviceName, session, "") + if cmd == nil { + return errors.New("failed to generate heredoc command") + } + + stdout, _ := cmd.StdoutPipe() + stdin, _ := cmd.StdinPipe() + stderr, _ := cmd.StderrPipe() + + serverConn, ok := session.Context().Value(gliderssh.ContextKeyConn).(*gossh.ServerConn) + if !ok { + return fmt.Errorf("failed to get server connection from session context") + } + + go func() { + serverConn.Wait() // nolint:errcheck + cmd.Process.Kill() // nolint:errcheck + }() + + log.WithFields(log.Fields{ + "user": session.User(), + "ispty": isPty, + "remoteaddr": session.RemoteAddr(), + "localaddr": session.LocalAddr(), + "Raw command": session.RawCommand(), + }).Info("Command started") + + err := cmd.Start() + if err != nil { + log.Warn(err) + } + + go func() { + if _, err := io.Copy(stdin, session); err != nil { + fmt.Println(err) //nolint:forbidigo + } + + stdin.Close() + }() + + go func() { + combinedOutput := io.MultiReader(stdout, stderr) + if _, err := io.Copy(session, combinedOutput); err != nil { + fmt.Println(err) //nolint:forbidigo + } + }() + + err = cmd.Wait() + if err != nil { + log.Warn(err) + } + + session.Exit(cmd.ProcessState.ExitCode()) //nolint:errcheck + + log.WithFields(log.Fields{ + "user": session.User(), + "remoteaddr": session.RemoteAddr(), + "localaddr": session.LocalAddr(), + "Raw command": session.RawCommand(), + }).Info("Command ended") + + return nil +} + +// Exec handles the SSH's server exec session when server is running in host mode. 
+func (s *Sessioner) Exec(session gliderssh.Session) error { + if len(session.Command()) == 0 { + log.WithFields(log.Fields{ + "user": session.User(), + "localaddr": session.LocalAddr(), + }).Error("None command was received") + + log.Info("Session ended") + _ = session.Exit(1) + + return nil + } + + user, err := osauth.LookupUser(session.User()) + if err != nil { + return err + } + + sPty, sWinCh, sIsPty := session.Pty() + + shell := user.Shell + if shell == "" { + shell = os.Getenv("SHELL") + } + + term := sPty.Term + if sIsPty && term == "" { + term = "xterm" + } + + cmd := command.NewCmd(user, shell, term, *s.deviceName, session.Environ(), shell, "-c", session.RawCommand()) + + wg := &sync.WaitGroup{} + if sIsPty { + pty, tty, err := initPty(cmd, session, sWinCh) + if err != nil { + log.Warn(err) + } + + defer tty.Close() + defer pty.Close() + + if err := os.Chown(tty.Name(), int(user.UID), -1); err != nil { + log.Warn(err) + } + } else { + stdout, _ := cmd.StdoutPipe() + stdin, _ := cmd.StdinPipe() + stderr, _ := cmd.StderrPipe() + + // relay input from the SSH session to the command. + go func() { + if _, err := io.Copy(stdin, session); err != nil { + fmt.Println(err) //nolint:forbidigo + } + + stdin.Close() + }() + + wg.Add(1) + + // relay the command's combined output and error streams back to the SSH session. 
+ go func() { + defer wg.Done() + combinedOutput := io.MultiReader(stdout, stderr) + if _, err := io.Copy(session, combinedOutput); err != nil { + fmt.Println(err) //nolint:forbidigo + } + }() + } + + log.WithFields(log.Fields{ + "user": session.User(), + "ispty": sIsPty, + "remoteaddr": session.RemoteAddr(), + "localaddr": session.LocalAddr(), + "Raw command": session.RawCommand(), + }).Info("Command started") + + if err := cmd.Start(); err != nil { + return err + } + + if !sIsPty { + wg.Wait() + } + + serverConn, ok := session.Context().Value(gliderssh.ContextKeyConn).(*gossh.ServerConn) + if !ok { + return fmt.Errorf("failed to get server connection from session context") + } + + // kill the process if the SSH connection is interrupted + go func() { + serverConn.Wait() // nolint:errcheck + cmd.Process.Kill() // nolint:errcheck + }() + + if err := cmd.Wait(); err != nil { + log.Warn(err) + } + + log.WithFields(log.Fields{ + "user": session.User(), + "ispty": sIsPty, + "remoteaddr": session.RemoteAddr(), + "localaddr": session.LocalAddr(), + "Raw command": session.RawCommand(), + }).Info("Command ended") + + if err := session.Exit(cmd.ProcessState.ExitCode()); err != nil { // nolint:errcheck + log.Warn(err) + } + + return nil +} + +// SFTP handles the SSH's server sftp session when server is running in host mode. +// +// sftp is a subsystem of SSH that allows file operations over SSH. 
+func (s *Sessioner) SFTP(session gliderssh.Session) error { + log.WithFields(log.Fields{ + "user": session.Context().User(), + }).Info("SFTP session started") + defer session.Close() + + cmd := command.SFTPServerCommand() + + looked, err := user.Lookup(session.User()) + if err != nil { + log.WithError(err).WithFields(log.Fields{ + "user": session.Context().User(), + }).Error("Failed to lookup user") + + return errors.New("failed to lookup user") + } + + home := fmt.Sprintf("HOME=%s", looked.HomeDir) + gid := fmt.Sprintf("GID=%s", looked.Gid) + uid := fmt.Sprintf("UID=%s", looked.Uid) + + cmd.Env = append(cmd.Env, home) + cmd.Env = append(cmd.Env, gid) + cmd.Env = append(cmd.Env, uid) + + input, err := cmd.StdinPipe() + if err != nil { + log.WithError(err).WithFields(log.Fields{ + "user": session.Context().User(), + }).Error("Failed to get stdin pipe") + + return errors.New("failed to get stdin pipe") + } + + output, err := cmd.StdoutPipe() + if err != nil { + log.WithError(err).WithFields(log.Fields{ + "user": session.Context().User(), + }).Error("Failed to get stdout pipe") + + return errors.New("failed to get stdout pipe") + } + + erro, err := cmd.StderrPipe() + if err != nil { + log.WithError(err).WithFields(log.Fields{ + "user": session.Context().User(), + }).Error("Failed to get stderr pipe") + + return errors.New("failed to get stderr pipe") + } + + if err := cmd.Start(); err != nil { + log.WithError(err).WithFields(log.Fields{ + "user": session.Context().User(), + }).Error("Failed to start command") + + return errors.New("failed to start command") + } + + go func() { + log.WithFields(log.Fields{ + "user": session.Context().User(), + }).Trace("copying input to session") + + if _, err := io.Copy(input, session); err != nil && err != io.EOF { + log.WithError(err).WithFields(log.Fields{ + "user": session.Context().User(), + }).Error("Failed to copy stdin to command") + + return + } + + log.WithFields(log.Fields{ + "user": session.Context().User(), + 
}).Trace("closing input to session ends") + + input.Close() + }() + + go func() { + log.WithFields(log.Fields{ + "user": session.Context().User(), + }).Trace("copying output to session") + + if _, err := io.Copy(session, output); err != nil { + log.WithError(err).WithFields(log.Fields{ + "user": session.Context().User(), + }).Error("Failed to copy stdout to session") + + return + } + + log.WithFields(log.Fields{ + "user": session.Context().User(), + }).Trace("closing output to session ends") + }() + + go func() { + log.WithFields(log.Fields{ + "user": session.Context().User(), + }).Trace("copying error to session") + + msgs := bufio.NewScanner(erro) + msgs.Split(bufio.ScanLines) + for msgs.Scan() { + if err := msgs.Err(); err != nil { + log.WithError(err).WithFields(log.Fields{ + "user": session.Context().User(), + }).Error("failed when reading the error output from sftp process") + + return + } + + log.WithFields(log.Fields{ + "user": session.Context().User(), + }).Error(msgs.Text()) + } + + log.WithFields(log.Fields{ + "user": session.Context().User(), + }).Trace("closing error to session ends") + }() + + if err = cmd.Wait(); err != nil { + log.WithError(err).WithFields(log.Fields{ + "user": session.Context().User(), + }).Error("Failed to wait command") + + return errors.New("failed to wait command") + } + + log.WithFields(log.Fields{ + "user": session.Context().User(), + }).Info("SFTP session closed") + + return nil +} diff --git a/agent/server/modes/host/utils.go b/agent/server/modes/host/utils.go new file mode 100644 index 00000000000..52fbc217832 --- /dev/null +++ b/agent/server/modes/host/utils.go @@ -0,0 +1,41 @@ +//go:build !freebsd + +package host + +import ( + "fmt" + "os" + "os/exec" + + gliderssh "github.com/gliderlabs/ssh" + "github.com/shellhub-io/shellhub/agent/pkg/osauth" + "github.com/shellhub-io/shellhub/agent/server/modes/host/command" +) + +func generateShellCmd(deviceName string, session gliderssh.Session, term string) *exec.Cmd { + username 
:= session.User() + envs := session.Environ() + + user, err := osauth.LookupUser(username) + if err != nil { + return nil + } + + shell := user.Shell + if shell == "" { + shell = os.Getenv("SHELL") + } + + if term == "" { + term = "xterm" + } + + authSock := session.Context().Value("SSH_AUTH_SOCK") + if authSock != nil { + envs = append(envs, fmt.Sprintf("%s=%s", "SSH_AUTH_SOCK", authSock.(string))) + } + + cmd := command.NewCmd(user, shell, term, deviceName, envs, shell, "--login") + + return cmd +} diff --git a/agent/server/modes/host/utils_freebsd.go b/agent/server/modes/host/utils_freebsd.go new file mode 100644 index 00000000000..9533b90c396 --- /dev/null +++ b/agent/server/modes/host/utils_freebsd.go @@ -0,0 +1,35 @@ +//go:build freebsd + +package host + +import ( + "os" + "os/exec" + + gliderssh "github.com/gliderlabs/ssh" + "github.com/shellhub-io/shellhub/agent/pkg/osauth" + "github.com/shellhub-io/shellhub/agent/server/modes/host/command" +) + +func generateShellCmd(deviceName string, session gliderssh.Session, term string) *exec.Cmd { + username := session.User() + envs := session.Environ() + + user, err := osauth.LookupUser(username) + if err != nil { + return nil + } + + shell := user.Shell + if shell == "" { + shell = os.Getenv("SHELL") + } + + if term == "" { + term = "xterm" + } + + cmd := command.NewCmd(user, shell, term, deviceName, envs, shell, "-") + + return cmd +} diff --git a/pkg/agent/server/modes/mocks/authenticator.go b/agent/server/modes/mocks/authenticator.go similarity index 100% rename from pkg/agent/server/modes/mocks/authenticator.go rename to agent/server/modes/mocks/authenticator.go diff --git a/pkg/agent/server/modes/modes.go b/agent/server/modes/modes.go similarity index 100% rename from pkg/agent/server/modes/modes.go rename to agent/server/modes/modes.go diff --git a/agent/server/server.go b/agent/server/server.go new file mode 100644 index 00000000000..aa76c7c1bf0 --- /dev/null +++ b/agent/server/server.go @@ -0,0 +1,233 @@ 
+package server + +import ( + "net" + "os/exec" + "sync" + "time" + + gliderssh "github.com/gliderlabs/ssh" + "github.com/shellhub-io/shellhub/agent/server/modes" + "github.com/shellhub-io/shellhub/agent/server/modes/host" + "github.com/shellhub-io/shellhub/pkg/api/client" + log "github.com/sirupsen/logrus" + gossh "golang.org/x/crypto/ssh" +) + +// List of SSH subsystems names supported by the agent. +const ( + // SFTPSubsystemName is the name of the SFTP subsystem. + SFTPSubsystemName = "sftp" +) + +type sshConn struct { + net.Conn + closeCallback func(string) + ctx gliderssh.Context +} + +func (c *sshConn) Close() error { + if id, ok := c.ctx.Value(gliderssh.ContextKeySessionID).(string); ok { + c.closeCallback(id) + } + + return c.Conn.Close() +} + +type Server struct { + sshd *gliderssh.Server + api client.Client + cmds map[string]*exec.Cmd + deviceName string + ContainerID string + mu sync.Mutex + keepAliveInterval uint32 + + // mode is the mode of the server, identifing where and how the SSH's server is running. + // + // For example, the [modes.HostMode] means that the SSH's server runs in the host machine, using the host + // `/etc/passwd`, `/etc/shadow`, redirecting the SSH's connection to the device sdin, stdout and stderr and etc. + // + // Check the [modes] package for more information. + mode modes.Mode + Sessions sync.Map +} + +// SSH channels supported by the SSH server. +// +// An SSH channel refers to a communication link established between a client and a server. SSH channels are multiplexed +// over a single encrypted connection, facilitating concurrent and secure communication for various purposes. +// +// SSH_MSG_CHANNEL_OPEN +// +// Check www.ietf.org/rfc/rfc4254.txt for more information. +const ( + // ChannelSession refers to a type of SSH channel that is established between a client and a server for interactive + // shell sessions or command execution. 
SSH channels are used to multiplex multiple logical communication channels + // over a single SSH connection. + // + // Check www.ietf.org/rfc/rfc4254.txt at section 6.1 for more information. + ChannelSession string = "session" + // ChannelDirectTcpip is the channel type in SSH is used to establish a direct TCP/IP connection between the SSH + // client and a target host through the SSH server. This channel type allows the client to initiate a connection to + // a specific destination host and port, and the SSH server acts as a bridge to facilitate this connection. + // + // Check www.ietf.org/rfc/rfc4254.txt at section 7.2 for more information. + ChannelDirectTcpip string = "direct-tcpip" +) + +type Feature uint + +const ( + // NoFeature no features enable. + NoFeature Feature = 0 + // LocalPortForwardFeature enable local port forward feature. + LocalPortForwardFeature Feature = iota << 1 + // ReversePortForwardFeature enable reverse port forward feature. + ReversePortForwardFeature +) + +// Config stores configuration needs for the SSH server. +type Config struct { + // PrivateKey is the path for the SSH server private key. + PrivateKey string + // KeepAliveInterval stores the time between each SSH keep alive request. + KeepAliveInterval uint32 + // Features list of featues on SSH server. + Features Feature +} + +// NewServer creates a new server SSH agent server. 
+func NewServer(api client.Client, mode modes.Mode, cfg *Config) *Server { + server := &Server{ + api: api, + mode: mode, + cmds: make(map[string]*exec.Cmd), + keepAliveInterval: cfg.KeepAliveInterval, + Sessions: sync.Map{}, + } + + if m, ok := mode.(*host.Mode); ok { + m.Sessioner.SetCmds(server.cmds) + } + + server.sshd = &gliderssh.Server{ + PasswordHandler: server.passwordHandler, + PublicKeyHandler: server.publicKeyHandler, + Handler: server.sessionHandler, + SessionRequestCallback: server.sessionRequestCallback, + SubsystemHandlers: map[string]gliderssh.SubsystemHandler{ + SFTPSubsystemName: server.sftpSubsystemHandler, + }, + ConnCallback: func(ctx gliderssh.Context, conn net.Conn) net.Conn { + closeCallback := func(id string) { + server.mu.Lock() + defer server.mu.Unlock() + + if v, ok := server.cmds[id]; ok { + v.Process.Kill() // nolint:errcheck + delete(server.cmds, id) + } + } + + return &sshConn{conn, closeCallback, ctx} + }, + LocalPortForwardingCallback: func(_ gliderssh.Context, _ string, _ uint32) bool { + return cfg.Features&LocalPortForwardFeature > 0 + }, + ReversePortForwardingCallback: func(_ gliderssh.Context, _ string, _ uint32) bool { + return cfg.Features&ReversePortForwardFeature > 0 + }, + ChannelHandlers: map[string]gliderssh.ChannelHandler{ + ChannelSession: gliderssh.DefaultSessionHandler, + ChannelDirectTcpip: gliderssh.DirectTCPIPHandler, + }, + } + + err := server.sshd.SetOption(gliderssh.HostKeyFile(cfg.PrivateKey)) + if err != nil { + log.Warn(err) + } + + return server +} + +// startKeepAlive sends a keep alive message to the server every in keepAliveInterval seconds. 
+func (s *Server) startKeepAliveLoop(session gliderssh.Session) { + interval := time.Duration(s.keepAliveInterval) * time.Second + + ticker := time.NewTicker(interval) + defer ticker.Stop() + + log.WithFields(log.Fields{ + "interval": interval, + }).Debug("Starting keep alive loop") + +loop: + for { + select { + case <-ticker.C: + if conn, ok := session.Context().Value(gliderssh.ContextKeyConn).(gossh.Conn); ok { + if _, _, err := conn.SendRequest("keepalive", false, nil); err != nil { + log.Error(err) + } + } + case <-session.Context().Done(): + log.Debug("Stopping keep alive loop after session closed") + ticker.Stop() + + break loop + } + } +} + +// List of request types that are supported by SSH. +// +// Once the session has been set up, a program is started at the remote end. The program can be a shell, an application +// program, or a subsystem with a host-independent name. Only one of these requests can succeed per channel. +// +// Check www.ietf.org/rfc/rfc4254.txt at section 6.5 for more information. +const ( + // RequestTypeShell is the request type for shell. + RequestTypeShell = "shell" + // RequestTypeExec is the request type for exec. + RequestTypeExec = "exec" + // RequestTypeSubsystem is the request type for any subsystem. + RequestTypeSubsystem = "subsystem" + // RequestTypeUnknown is the request type for unknown. + // + // It is not a valid request type documentated by SSH's RFC, but it can be useful to identify the request type when + // it is not known. 
+ RequestTypeUnknown = "unknown" +) + +func (s *Server) sessionRequestCallback(session gliderssh.Session, requestType string) bool { + session.Context().SetValue("request_type", requestType) + + go s.startKeepAliveLoop(session) + + return true +} + +func (s *Server) HandleConn(conn net.Conn) { + s.sshd.HandleConn(conn) +} + +func (s *Server) SetDeviceName(name string) { + s.deviceName = name +} + +func (s *Server) SetContainerID(id string) { + s.ContainerID = id +} + +func (s *Server) CloseSession(id string) { + if session, ok := s.Sessions.Load(id); ok { + session.(net.Conn).Close() + s.Sessions.Delete(id) + } +} + +func (s *Server) ListenAndServe() error { + return s.sshd.ListenAndServe() +} diff --git a/agent/server/session.go b/agent/server/session.go new file mode 100644 index 00000000000..fc327818b5b --- /dev/null +++ b/agent/server/session.go @@ -0,0 +1,129 @@ +package server + +import ( + "fmt" + "os" + "os/user" + "path" + "strconv" + + gliderssh "github.com/gliderlabs/ssh" + log "github.com/sirupsen/logrus" +) + +// Type is the type of SSH session. +type Type string + +const ( + // SessionTypeShell is the session's type returned when the SSH client requests a shell. + SessionTypeShell Type = "shell" + // SessionTypeHeredoc is the session's type returned when the SSH client requests a command execution with a heredoc. + // "heredoc" is a format that does not require a TTY, but attaches the client input to the command's stdin. + // It is used to execute a sequence of commands in a single SSH connection without the need to open a shell. + SessionTypeHeredoc Type = "heredoc" + // SessionTypeExec is the session's type returned when the SSH client requests a command execution. + SessionTypeExec Type = "exec" + // SessionTypeSubsystem is the session's type returned when the SSH client requests a subsystem. + SessionTypeSubsystem Type = "subsystem" + // SessionTypeUnknown is the session's type returned when the SSH client requests an unknown session type. 
+ SessionTypeUnknown Type = "unknown" +) + +// GetSessionType returns the session's type based on the SSH client session. +func GetSessionType(session gliderssh.Session) (Type, error) { + _, _, isPty := session.Pty() + + requestType, ok := session.Context().Value("request_type").(string) + if !ok { + return SessionTypeUnknown, fmt.Errorf("failed to get request type from session context") + } + + switch { + case isPty && requestType == RequestTypeShell: + return SessionTypeShell, nil + case !isPty && requestType == RequestTypeShell: + return SessionTypeHeredoc, nil + case requestType == RequestTypeExec: + return SessionTypeExec, nil + case requestType == RequestTypeSubsystem: + return SessionTypeSubsystem, nil + default: + return SessionTypeUnknown, nil + } +} + +func (s *Server) sessionHandler(session gliderssh.Session) { + log.Info("New session request") + + if gliderssh.AgentRequested(session) { + user, err := user.Lookup(session.User()) + if err != nil { + log.WithError(err).Error("failed to get the user") + + return + } + + id, err := strconv.Atoi(user.Uid) + if err != nil { + log.WithError(err).Error("failed to get the user ID") + + return + } + + gid, err := strconv.Atoi(user.Gid) + if err != nil { + log.WithError(err).Error("failed to get the group IP") + + return + } + + l, err := gliderssh.NewAgentListener() + if err != nil { + log.WithError(err).Error("failed to create agent listener") + + return + } + + defer l.Close() + + authSock := l.Addr().String() + + // NOTE: When the agent is started by the root user, we need to change the ownership of the Unix socket created + // to allow access for the logged-in user. 
+ if err := os.Chown(path.Dir(authSock), id, gid); err != nil { + log.WithError(err).Error("failed to change the permission of directory where unix socket was created") + + return + } + + if err := os.Chown(authSock, id, gid); err != nil { + log.WithError(err).Error("failed to change the permission of unix socket") + + return + } + + session.Context().SetValue("SSH_AUTH_SOCK", authSock) + + go gliderssh.ForwardAgentConnections(l, session) + } + + sessionType, err := GetSessionType(session) + if err != nil { + log.Error(err) + + return + } + + log.WithField("type", sessionType).Info("Request type got") + + switch sessionType { + case SessionTypeShell: + s.mode.Shell(session) //nolint:errcheck + case SessionTypeHeredoc: + s.mode.Heredoc(session) //nolint:errcheck + default: + s.mode.Exec(session) //nolint:errcheck + } + + log.Info("Session ended") +} diff --git a/pkg/agent/server/subsystem.go b/agent/server/subsystem.go similarity index 87% rename from pkg/agent/server/subsystem.go rename to agent/server/subsystem.go index 63dc97db0fe..60281c35a0f 100644 --- a/pkg/agent/server/subsystem.go +++ b/agent/server/subsystem.go @@ -6,7 +6,5 @@ import ( // sftpSubsystemHandler handles the SFTP subsystem session. func (s *Server) sftpSubsystemHandler(session gliderssh.Session) { - go s.startKeepAliveLoop(session) - s.mode.SFTP(session) //nolint:errcheck } diff --git a/pkg/agent/server/utmp/utmp.go b/agent/server/utmp/utmp.go similarity index 88% rename from pkg/agent/server/utmp/utmp.go rename to agent/server/utmp/utmp.go index ccb88936729..e01fa44a8e8 100644 --- a/pkg/agent/server/utmp/utmp.go +++ b/agent/server/utmp/utmp.go @@ -43,8 +43,9 @@ func UtmpStartSession(line, user, remoteAddr string) Utmpx { //nolint:revive var u Utmpx u.Type = UserProcess - u.Pid = int32(os.Getpid()) - + // NOTE: The maximum value of a pid in Linux and FreeBSD systems fits inside a 4-byte int32. 
+ // [https://github.com/torvalds/linux/blob/c766d1472c70d25ad475cf56042af1652e792b23/include/uapi/asm-generic/posix_types.h#L28] + u.Pid = int32(os.Getpid()) //nolint:gosec // There are two versions of the utmpSetTime function // defined in utmp_timeval_time??.go, one for systems // that write the time fields as 32-bit values and one @@ -130,7 +131,7 @@ func updUtmp(u Utmpx, id string) { // Lock the file lk := unix.Flock_t{ Type: int16(unix.F_WRLCK), - Pid: int32(os.Getpid()), + Pid: int32(os.Getpid()), //nolint:gosec // The maximum value of a pid in Linux and FreeBSD systems fits inside a int32. } err = unix.FcntlFlock(file.Fd(), unix.F_SETLKW, &lk) @@ -179,12 +180,11 @@ func updUtmp(u Utmpx, id string) { } } - err = binary.Write(file, binary.LittleEndian, &u) - if err != nil { + if err := binary.Write(file, binary.LittleEndian, &u); err != nil { //nolint:staticcheck logrus.WithFields(logrus.Fields{ "file": UtmpxFile, "err": err, - }).Warn("Write failed") + }).Warn("Write failed utmp") } } @@ -207,7 +207,7 @@ func updWtmp(u Utmpx) { lk := unix.Flock_t{ Type: int16(unix.F_WRLCK), - Pid: int32(os.Getpid()), + Pid: int32(os.Getpid()), //nolint:gosec // The maximum value of a pid in Linux and FreeBSD systems fits inside a int32. 
} err = unix.FcntlFlock(file.Fd(), unix.F_SETLKW, &lk) @@ -245,13 +245,11 @@ func updWtmp(u Utmpx) { } } - err = binary.Write(file, binary.LittleEndian, &u) - - if err != nil { + if err := binary.Write(file, binary.LittleEndian, &u); err != nil { //nolint:staticcheck logrus.WithFields(logrus.Fields{ "file": WtmpxFile, "err": err, - }).Warn("Write failed") + }).Warn("Write failed on wtmp") if err := file.Truncate(fileSize); err != nil { logrus.WithFields(logrus.Fields{ diff --git a/pkg/agent/server/utmp/utmp_timeval_time32.go b/agent/server/utmp/utmp_timeval_time32.go similarity index 95% rename from pkg/agent/server/utmp/utmp_timeval_time32.go rename to agent/server/utmp/utmp_timeval_time32.go index 753c3315746..aa453141bfe 100644 --- a/pkg/agent/server/utmp/utmp_timeval_time32.go +++ b/agent/server/utmp/utmp_timeval_time32.go @@ -48,7 +48,7 @@ func utmpSetTime(u Utmpx) Utmpx { logrus.Warn(err) } - u.Tv.Sec, u.Tv.Usec = int32(a.Sec), int32(a.Usec) + u.Tv.Sec, u.Tv.Usec = int32(a.Sec), int32(a.Usec) //nolint:gosec return u } diff --git a/pkg/agent/server/utmp/utmp_timeval_time64.go b/agent/server/utmp/utmp_timeval_time64.go similarity index 97% rename from pkg/agent/server/utmp/utmp_timeval_time64.go rename to agent/server/utmp/utmp_timeval_time64.go index 36b7214872a..09ff331fc5c 100644 --- a/pkg/agent/server/utmp/utmp_timeval_time64.go +++ b/agent/server/utmp/utmp_timeval_time64.go @@ -49,7 +49,7 @@ func utmpSetTime(u Utmpx) Utmpx { logrus.Warn(err) } - u.Tv.Sec, u.Tv.Usec = a.Sec, a.Usec + u.Tv.Sec, u.Tv.Usec = a.Sec, int64(a.Usec) return u } diff --git a/agent/sftp.go b/agent/sftp.go new file mode 100644 index 00000000000..34de87e267c --- /dev/null +++ b/agent/sftp.go @@ -0,0 +1,107 @@ +package main + +import ( + "errors" + "fmt" + "io" + "os" + "strconv" + "syscall" + + "github.com/pkg/sftp" + "github.com/shellhub-io/shellhub/agent/server/modes/host/command" +) + +type pipe struct { + in *os.File + out *os.File + err *os.File +} + +func (p *pipe) Read(data 
[]byte) (int, error) { + return p.in.Read(data) +} + +func (p *pipe) Write(data []byte) (int, error) { + return p.out.Write(data) +} + +func (p *pipe) Close() error { + os.Exit(0) + + return nil +} + +// NewSFTPServer creates a new SFTP server when a new session is created between the agent and the server. +func NewSFTPServer(mode command.SFTPServerMode) { + piped := &pipe{os.Stdin, os.Stdout, os.Stderr} + + if mode == command.SFTPServerModeDocker { + if err := syscall.Chroot("/host"); err != nil { + fmt.Fprintln(os.Stderr, err) + + return + } + } + + home, ok := os.LookupEnv("HOME") + if !ok { + fmt.Fprintln(os.Stderr, errors.New("HOME environment variable not set")) + + return + } + + toInt := func(s string, _ bool) (int, error) { + i, err := strconv.Atoi(s) + if err != nil { + return 0, err + } + + return i, nil + } + + gid, err := toInt(os.LookupEnv("GID")) + if err != nil { + fmt.Fprintln(os.Stderr, errors.New("GID environment variable not set")) + + return + } + + uid, err := toInt(os.LookupEnv("UID")) + if err != nil { + fmt.Fprintln(os.Stderr, errors.New("UID environment variable not set")) + + return + } + + if err := syscall.Chdir(home); err != nil { + fmt.Fprintln(os.Stderr, err) + + return + } + + if err := syscall.Setgid(gid); err != nil { + fmt.Fprintln(os.Stderr, err) + + return + } + + if err := syscall.Setuid(uid); err != nil { + fmt.Fprintln(os.Stderr, err) + + return + } + + server, err := sftp.NewServer(piped, []sftp.ServerOption{}...) + if err != nil { + fmt.Fprintln(os.Stderr, err) + + return + } + + if err := server.Serve(); err != io.EOF { + fmt.Fprintln(os.Stderr, err) + } + + server.Close() +} diff --git a/api/.air.toml b/api/.air.toml new file mode 100644 index 00000000000..4a5393d21eb --- /dev/null +++ b/api/.air.toml @@ -0,0 +1,34 @@ +root = "../" +tmp_dir = "tmp" + +[build] +pre_cmd = [] +cmd = "go build -gcflags=\"all=-N -l\" -o ./tmp/main ." 
+post_cmd = [] +bin = "" +full_bin = "dlv exec ./tmp/main" +args_bin = [ + "--listen=0.0.0.0:2345", + "--headless", + "--continue", + "--accept-multiclient", + "--", + "server", +] +delay = 500 +exclude_dir = ["assets", "tmp", "vendor", "testdata"] +exclude_file = [] +exclude_regex = ["_test.go"] +exclude_unchanged = false +follow_symlink = false +include_dir = ["api", "openapi"] +include_ext = ["go", "tpl", "tmpl", "html", "yaml", "yml", "json"] +include_file = [] +kill_delay = "0s" +log = "build-errors.log" +poll = false +poll_interval = 0 +rerun = false +rerun_delay = 500 +send_interrupt = false +stop_on_error = false diff --git a/api/Dockerfile b/api/Dockerfile index 8902bc3229c..485dc83b5ef 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -1,5 +1,5 @@ # base stage -FROM golang:1.20.4-alpine3.16 AS base +FROM golang:1.24-alpine3.22 AS base ARG GOPROXY @@ -9,6 +9,10 @@ WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub COPY ./go.mod ./ +WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub/openapi + +COPY ./openapi/go.mod ./ + WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub/api COPY ./api/go.mod ./api/go.sum ./ @@ -23,6 +27,7 @@ FROM base AS builder ARG GOPROXY COPY ./pkg $GOPATH/src/github.com/shellhub-io/shellhub/pkg +COPY ./openapi $GOPATH/src/github.com/shellhub-io/shellhub/openapi COPY ./api . 
WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub @@ -40,9 +45,10 @@ ARG GOPROXY ENV GOPROXY ${GOPROXY} RUN apk add --update openssl build-base docker-cli -RUN go install github.com/markbates/refresh@v1.11.1 && \ - go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.53.3 && \ - go install github.com/vektra/mockery/v2/...@v2.20.0 +RUN go install github.com/air-verse/air@v1.62 && \ + go install github.com/go-delve/delve/cmd/dlv@v1.25 && \ + go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.1.6 && \ + go install github.com/vektra/mockery/v2/...@v2.53.2 WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub @@ -52,17 +58,21 @@ COPY ./api/entrypoint-dev.sh /entrypoint.sh WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub/api +RUN mkdir -p /templates + +COPY ./install.sh /templates/install.sh + ENTRYPOINT ["/entrypoint.sh"] # production stage -FROM alpine:3.19.0 AS production +FROM alpine:3.23.2 AS production RUN apk add curl COPY --from=builder /go/src/github.com/shellhub-io/shellhub/api/api /api -RUN mkdir /templates +RUN mkdir -p /templates -COPY ./api/templates /templates +COPY ./install.sh /templates/install.sh -ENTRYPOINT /api server +ENTRYPOINT ["/api", "server"] diff --git a/api/entrypoint-dev.sh b/api/entrypoint-dev.sh index 0a254f3f418..fb3344a6d5a 100755 --- a/api/entrypoint-dev.sh +++ b/api/entrypoint-dev.sh @@ -6,10 +6,10 @@ mkdir -p /var/run/secrets if [ ! 
-f /var/run/secrets/api_private_key ]; then echo "Generating private key" - openssl genrsa -out /var/run/secrets/api_private_key 2048 + openssl genpkey -algorithm RSA -out /var/run/secrets/api_private_key -pkeyopt rsa_keygen_bits:2048 openssl rsa -in /var/run/secrets/api_private_key -pubout -out /var/run/secrets/api_public_key fi ln -sf $PWD/api /api -refresh run +air diff --git a/api/go.mod b/api/go.mod index 1de725e9a67..d89772afa13 100644 --- a/api/go.mod +++ b/api/go.mod @@ -1,83 +1,151 @@ module github.com/shellhub-io/shellhub/api -go 1.20 +go 1.24.9 require ( github.com/cnf/structhash v0.0.0-20201127153200-e1b16c1ebc08 - github.com/getsentry/sentry-go v0.26.0 - github.com/golang-jwt/jwt v3.2.2+incompatible - github.com/golang-jwt/jwt/v4 v4.5.0 - github.com/hibiken/asynq v0.24.1 - github.com/labstack/echo/v4 v4.11.4 - github.com/mitchellh/mapstructure v1.5.0 + github.com/getkin/kin-openapi v0.133.0 + github.com/getsentry/sentry-go v0.41.0 + github.com/golang-jwt/jwt/v4 v4.5.2 + github.com/gorilla/websocket v1.5.3 + github.com/labstack/echo-contrib v0.17.4 + github.com/labstack/echo/v4 v4.15.0 + github.com/labstack/gommon v0.4.2 github.com/pkg/errors v0.9.1 github.com/shellhub-io/mongotest v0.0.0-20230928124937-e33b07010742 github.com/shellhub-io/shellhub v0.13.4 - github.com/sirupsen/logrus v1.9.3 - github.com/spf13/cobra v1.8.0 + github.com/sirupsen/logrus v1.9.4 + github.com/spf13/cobra v1.10.2 github.com/square/mongo-lock v0.0.0-20230808145049-cfcf499f6bf0 - github.com/stretchr/testify v1.8.4 - github.com/undefinedlabs/go-mpatch v1.0.7 - github.com/xakep666/mongo-migrate v0.2.1 - go.mongodb.org/mongo-driver v1.13.1 - golang.org/x/crypto v0.18.0 - gopkg.in/tomb.v2 v2.0.0-20161208151619-d5d1b5820637 + github.com/stretchr/testify v1.11.1 + github.com/testcontainers/testcontainers-go v0.40.0 + github.com/testcontainers/testcontainers-go/modules/mongodb v0.40.0 + github.com/xakep666/mongo-migrate v0.3.2 + go.mongodb.org/mongo-driver v1.17.7 + golang.org/x/crypto 
v0.47.0 ) require ( - github.com/andybalholm/brotli v1.0.5 // indirect - github.com/cespare/xxhash/v2 v2.2.0 // indirect + dario.cat/mergo v1.0.2 // indirect + github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 // indirect + github.com/Microsoft/go-winio v0.6.2 // indirect + github.com/adhocore/gronx v1.8.1 // indirect + github.com/andybalholm/brotli v1.1.0 // indirect + github.com/beorn7/perks v1.0.1 // indirect + github.com/bodgit/plumbing v1.2.0 // indirect + github.com/bodgit/sevenzip v1.3.0 // indirect + github.com/bodgit/windows v1.0.0 // indirect + github.com/cenkalti/backoff/v4 v4.3.0 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/connesc/cipherio v0.2.1 // indirect + github.com/containerd/errdefs v1.0.0 // indirect + github.com/containerd/errdefs/pkg v0.3.0 // indirect + github.com/containerd/log v0.1.0 // indirect + github.com/containerd/platforms v0.2.1 // indirect + github.com/cpuguy83/dockercfg v0.3.2 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect + github.com/distribution/reference v0.6.0 // indirect + github.com/docker/docker v28.5.1+incompatible // indirect + github.com/docker/go-connections v0.6.0 // indirect + github.com/docker/go-units v0.5.0 // indirect github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5 // indirect + github.com/ebitengine/purego v0.8.4 // indirect + github.com/felixge/httpsnoop v1.0.4 // indirect + github.com/fsnotify/fsnotify v1.6.0 // indirect + github.com/go-logr/logr v1.4.3 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-ole/go-ole v1.3.0 // indirect + github.com/go-openapi/jsonpointer v0.21.0 // indirect + github.com/go-openapi/swag v0.23.0 // indirect github.com/go-playground/locales v0.14.1 // indirect github.com/go-playground/universal-translator v0.18.1 // indirect github.com/go-playground/validator/v10 v10.11.2 // indirect github.com/go-redis/cache/v8 
v8.4.4 // indirect github.com/go-redis/redis/v8 v8.11.5 // indirect github.com/go-resty/resty/v2 v2.7.0 // indirect - github.com/golang/protobuf v1.5.2 // indirect - github.com/golang/snappy v0.0.4 // indirect - github.com/google/uuid v1.3.0 // indirect + github.com/golang/mock v1.6.0 // indirect + github.com/golang/protobuf v1.5.4 // indirect + github.com/golang/snappy v1.0.0 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/gorilla/mux v1.8.0 // indirect + github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0 // indirect + github.com/hashicorp/errwrap v1.1.0 // indirect + github.com/hashicorp/go-multierror v1.1.1 // indirect + github.com/hibiken/asynq v0.24.1 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect - github.com/klauspost/compress v1.16.0 // indirect + github.com/josharian/intern v1.0.0 // indirect + github.com/klauspost/compress v1.18.0 // indirect github.com/klauspost/pgzip v1.2.5 // indirect - github.com/labstack/gommon v0.4.2 // indirect github.com/leodido/go-urn v1.2.2 // indirect - github.com/mattn/go-colorable v0.1.13 // indirect + github.com/lufia/plan9stats v0.0.0-20240408141607-282e7b5d6b74 // indirect + github.com/magiconair/properties v1.8.10 // indirect + github.com/mailru/easyjson v0.7.7 // indirect + github.com/mattn/go-colorable v0.1.14 // indirect github.com/mattn/go-isatty v0.0.20 // indirect - github.com/mholt/archiver/v3 v3.5.1 // indirect - github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe // indirect - github.com/nwaples/rardecode v1.1.3 // indirect + github.com/mholt/archiver/v4 v4.0.0-alpha.8 // indirect + github.com/moby/docker-image-spec v1.3.1 // indirect + github.com/moby/go-archive v0.1.0 // indirect + github.com/moby/patternmatcher v0.6.0 // indirect + github.com/moby/sys/sequential v0.6.0 // indirect + github.com/moby/sys/user v0.4.0 // indirect + github.com/moby/sys/userns v0.1.0 // indirect + github.com/moby/term v0.5.0 // indirect + github.com/mohae/deepcopy 
v0.0.0-20170929034955-c48cc78d4826 // indirect + github.com/montanaflynn/stats v0.7.1 // indirect + github.com/morikuni/aec v1.0.0 // indirect + github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect + github.com/nwaples/rardecode/v2 v2.2.0 // indirect + github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037 // indirect + github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90 // indirect + github.com/opencontainers/go-digest v1.0.0 // indirect + github.com/opencontainers/image-spec v1.1.1 // indirect github.com/oschwald/geoip2-golang v1.8.0 // indirect github.com/oschwald/maxminddb-golang v1.10.0 // indirect + github.com/perimeterx/marshmallow v1.1.5 // indirect github.com/pierrec/lz4/v4 v4.1.17 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect + github.com/prometheus/client_golang v1.22.0 // indirect + github.com/prometheus/client_model v0.6.2 // indirect + github.com/prometheus/common v0.63.0 // indirect + github.com/prometheus/procfs v0.16.1 // indirect github.com/redis/go-redis/v9 v9.0.3 // indirect github.com/robfig/cron/v3 v3.0.1 // indirect github.com/sethvargo/go-envconfig v0.9.0 // indirect + github.com/shirou/gopsutil/v4 v4.25.6 // indirect github.com/spf13/cast v1.3.1 // indirect - github.com/spf13/pflag v1.0.5 // indirect - github.com/stretchr/objx v0.5.0 // indirect + github.com/spf13/pflag v1.0.9 // indirect + github.com/stretchr/objx v0.5.2 // indirect + github.com/therootcompany/xz v1.0.1 // indirect + github.com/tklauser/go-sysconf v0.3.13 // indirect + github.com/tklauser/numcpus v0.7.0 // indirect github.com/tkuchiki/go-timezone v0.2.2 // indirect github.com/tkuchiki/parsetime v0.3.0 // indirect - github.com/ulikunitz/xz v0.5.11 // indirect + github.com/ulikunitz/xz v0.5.14 // indirect github.com/valyala/bytebufferpool v1.0.0 // indirect github.com/valyala/fasttemplate v1.2.2 // indirect github.com/vmihailenco/go-tinylfu 
v0.2.2 // indirect github.com/vmihailenco/msgpack/v5 v5.3.5 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect + github.com/woodsbury/decimal128 v1.3.0 // indirect github.com/xdg-go/pbkdf2 v1.0.0 // indirect github.com/xdg-go/scram v1.1.2 // indirect github.com/xdg-go/stringprep v1.0.4 // indirect - github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 // indirect - github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d // indirect - golang.org/x/net v0.19.0 // indirect - golang.org/x/sync v0.1.0 // indirect - golang.org/x/sys v0.16.0 // indirect - golang.org/x/text v0.14.0 // indirect - golang.org/x/time v0.5.0 // indirect - google.golang.org/protobuf v1.29.1 // indirect + github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect + github.com/yusufpapurcu/wmi v1.2.4 // indirect + go.opentelemetry.io/auto/sdk v1.1.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.51.0 // indirect + go.opentelemetry.io/otel v1.37.0 // indirect + go.opentelemetry.io/otel/metric v1.37.0 // indirect + go.opentelemetry.io/otel/trace v1.37.0 // indirect + go4.org v0.0.0-20200411211856-f5505b9728dd // indirect + golang.org/x/net v0.48.0 // indirect + golang.org/x/sync v0.19.0 // indirect + golang.org/x/sys v0.40.0 // indirect + golang.org/x/text v0.33.0 // indirect + golang.org/x/time v0.14.0 // indirect + google.golang.org/protobuf v1.36.6 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/api/go.sum b/api/go.sum index 7c2c2fcbbd2..046c1777dc2 100644 --- a/api/go.sum +++ b/api/go.sum @@ -1,34 +1,125 @@ -github.com/andybalholm/brotli v1.0.1/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y= -github.com/andybalholm/brotli v1.0.5 h1:8uQZIdzKmjc/iuPu7O2ioW48L81FgatrcpfFmiq/cCs= -github.com/andybalholm/brotli v1.0.5/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go 
v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +dario.cat/mergo v1.0.2 h1:85+piFYR1tMbRrLcDwR18y4UKJ3aH1Tbzi24VRW1TK8= +dario.cat/mergo v1.0.2/go.mod h1:E/hbnu0NxMFBjpMIE34DRGLWqDy0g5FuKDhCb31ngxA= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +github.com/AdaLogics/go-fuzz-headers v0.0.0-20240806141605-e8a1dd7889d6 h1:He8afgbRMd7mFxO99hRNu+6tazq8nFF9lIwo9JFroBk= +github.com/AdaLogics/go-fuzz-headers v0.0.0-20240806141605-e8a1dd7889d6/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8= +github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25UVaW/CKtUDjefjrs0SPonmDGUVOYP0= +github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod 
h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= +github.com/adhocore/gronx v1.8.1 h1:F2mLTG5sB11z7vplwD4iydz3YCEjstSfYmCrdSm3t6A= +github.com/adhocore/gronx v1.8.1/go.mod h1:7oUY1WAU8rEJWmAxXR2DN0JaO4gi9khSgKjiRypqteg= +github.com/andybalholm/brotli v1.1.0 h1:eLKJA0d02Lf0mVpIDgYnqXcUn0GqVmEFny3VuID1U3M= +github.com/andybalholm/brotli v1.1.0/go.mod h1:sms7XGricyQI9K10gOSf56VKKWS4oLer58Q+mhRPtnY= +github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/bodgit/plumbing v1.2.0 h1:gg4haxoKphLjml+tgnecR4yLBV5zo4HAZGCtAh3xCzM= +github.com/bodgit/plumbing v1.2.0/go.mod h1:b9TeRi7Hvc6Y05rjm8VML3+47n4XTZPtQ/5ghqic2n8= +github.com/bodgit/sevenzip v1.3.0 h1:1ljgELgtHqvgIp8W8kgeEGHIWP4ch3xGI8uOBZgLVKY= +github.com/bodgit/sevenzip v1.3.0/go.mod h1:omwNcgZTEooWM8gA/IJ2Nk/+ZQ94+GsytRzOJJ8FBlM= +github.com/bodgit/windows v1.0.0 h1:rLQ/XjsleZvx4fR1tB/UxQrK+SJ2OFHzfPjLWWOhDIA= +github.com/bodgit/windows v1.0.0/go.mod h1:a6JLwrB4KrTR5hBpp8FI9/9W9jJfeQ2h4XDXU74ZCdM= github.com/bsm/ginkgo/v2 v2.7.0 h1:ItPMPH90RbmZJt5GtkcNvIRuGEdwlBItdNVoyzaNQao= github.com/bsm/ginkgo/v2 v2.7.0/go.mod h1:AiKlXPm7ItEHNc/2+OkrNG4E0ITzojb9/xWzvQ9XZ9w= github.com/bsm/gomega v1.26.0 h1:LhQm+AFcgV2M0WyKroMASzAzCAJVpAxQXv4SaI9a69Y= github.com/bsm/gomega v1.26.0/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0= +github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= +github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= 
+github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cnf/structhash v0.0.0-20201127153200-e1b16c1ebc08 h1:ox2F0PSMlrAAiAdknSRMDrAr8mfxPCfSZolH+/qQnyQ= github.com/cnf/structhash v0.0.0-20201127153200-e1b16c1ebc08/go.mod h1:pCxVEbcm3AMg7ejXyorUXi6HQCzOIBf7zEDVPtw0/U4= -github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/connesc/cipherio v0.2.1 h1:FGtpTPMbKNNWByNrr9aEBtaJtXjqOzkIXNYJp6OEycw= +github.com/connesc/cipherio v0.2.1/go.mod h1:ukY0MWJDFnJEbXMQtOcn2VmTpRfzcTz4OoVrWGGJZcA= +github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI= +github.com/containerd/errdefs v1.0.0/go.mod h1:+YBYIdtsnF4Iw6nWZhJcqGSg/dwvV7tyJ/kCkyJ2k+M= +github.com/containerd/errdefs/pkg v0.3.0 h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151Xdx3ZPPE= +github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk= +github.com/containerd/log v0.1.0 
h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= +github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= +github.com/containerd/platforms v0.2.1 h1:zvwtM3rz2YHPQsF2CHYM8+KtB5dvhISiXh5ZpSBQv6A= +github.com/containerd/platforms v0.2.1/go.mod h1:XHCb+2/hzowdiut9rkudds9bE5yJ7npe7dG/wG+uFPw= +github.com/cpuguy83/dockercfg v0.3.2 h1:DlJTyZGBDlXqUZ2Dk2Q3xHs/FtnooJJVaad2S9GKorA= +github.com/cpuguy83/dockercfg v0.3.2/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc= +github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= github.com/crackcomm/go-clitable v0.0.0-20151121230230-53bcff2fea36/go.mod h1:XiV36mPegOHv+dlkCSCazuGdQR2BUTgIZ2FKqTTHles= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY= +github.com/creack/pty v1.1.18/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= +github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= +github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= +github.com/docker/docker v28.5.1+incompatible h1:Bm8DchhSD2J6PsFzxC35TZo4TLGR2PdW/E69rU45NhM= +github.com/docker/docker v28.5.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/go-connections v0.6.0 h1:LlMG9azAe1TqfR7sO+NJttz1gy6KO7VJBh+pMmjSD94= +github.com/docker/go-connections v0.6.0/go.mod 
h1:AahvXYshr6JgfUJGdDCs2b5EZG/vmaMAntpSFH5BFKE= +github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= +github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5 h1:iFaUwBSo5Svw6L7HYpRu/0lE3e0BaElwnNO1qkNQxBY= github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5/go.mod h1:qssHWj60/X5sZFNxpG4HBPDHVqxNm4DfnCKgrbZOT+s= github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY= +github.com/ebitengine/purego v0.8.4 h1:CF7LEKg5FFOsASUj0+QwaXf8Ht6TlFxg09+S9wz0omw= +github.com/ebitengine/purego v0.8.4/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= -github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= -github.com/getsentry/sentry-go v0.26.0 h1:IX3++sF6/4B5JcevhdZfdKIHfyvMmAq/UnqcyT2H6mA= -github.com/getsentry/sentry-go v0.26.0/go.mod h1:lc76E2QywIyW8WuBnwl8Lc4bkmQH4+w1gwTf25trprY= +github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= +github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= +github.com/getkin/kin-openapi v0.133.0 h1:pJdmNohVIJ97r4AUFtEXRXwESr8b0bD721u/Tz6k8PQ= +github.com/getkin/kin-openapi v0.133.0/go.mod h1:boAciF6cXk5FhPqe/NQeBTeenbjqU4LhWBf09ILVvWE= +github.com/getsentry/sentry-go 
v0.41.0 h1:q/dQZOlEIb4lhxQSjJhQqtRr3vwrJ6Ahe1C9zv+ryRo= +github.com/getsentry/sentry-go v0.41.0/go.mod h1:eRXCoh3uvmjQLY6qu63BjUZnaBu5L5WhMV1RwYO8W5s= github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA= +github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= +github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE= +github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78= +github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1rr/O9oNQ= +github.com/go-openapi/jsonpointer v0.21.0/go.mod h1:IUyH9l/+uyhIYQ/PXVA41Rexl+kOkAPDdXEYns6fzUY= +github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE= +github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ= github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= 
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= @@ -44,13 +135,26 @@ github.com/go-resty/resty/v2 v2.7.0 h1:me+K9p3uhSmXtrBZ4k9jcEAfJmuC8IivWHwaLZwPr github.com/go-resty/resty/v2 v2.7.0/go.mod h1:9PWDzw47qPphMRFfhsyk0NnSgvluHcljSMVIq3w7q0I= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= -github.com/go-test/deep v1.0.1 h1:UQhStjbkDClarlmv0am7OXXO4/GaPdCGiUiMTvi28sg= github.com/go-test/deep v1.0.1/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= -github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY= -github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= -github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg= -github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= +github.com/go-test/deep v1.0.8 h1:TDsG77qcSprGbC6vTN8OuXp5g+J+b5Pcguhf7Zt61VM= +github.com/go-test/deep v1.0.8/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE= +github.com/golang-jwt/jwt/v4 v4.5.2 h1:YtQM7lnr8iZ+j5q71MGKkNw9Mn7AjHM68uc9g5fXeUI= +github.com/golang-jwt/jwt/v4 v4.5.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod 
h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= +github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= @@ -58,137 +162,236 @@ github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:W github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.2/go.mod 
h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= -github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v1.0.0 h1:Oy607GVXHs7RtbggtPBnr2RmDArIsAefDwvrdWvRhGs= +github.com/golang/snappy v1.0.0/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= 
+github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= -github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI= +github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= +github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg= +github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0 h1:YBftPWNWd4WwGqtY2yeZL2ef8rHAxPBD8KFhJpmcqms= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0/go.mod h1:YN5jB8ie0yfIUg6VvR9Kz84aCaG7AsGZnLjhHbUqwPg= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= +github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hibiken/asynq v0.24.1 h1:+5iIEAyA9K/lcSPvx3qoPtsKJeKI5u9aOIvUmSsazEw= github.com/hibiken/asynq 
v0.24.1/go.mod h1:u5qVeSbrnfT+vtG5Mq8ZPzQu/BmCKMHvTGb91uy9Tts= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= -github.com/klauspost/compress v1.11.4/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= -github.com/klauspost/compress v1.16.0 h1:iULayQNOReoYUe+1qtKOqw9CwJv3aNQu8ivo7lw1HU4= -github.com/klauspost/compress v1.16.0/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= +github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= +github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= github.com/klauspost/pgzip v1.2.5 h1:qnWYvvKqedOF2ulHpMG72XQol4ILEJ8k2wwRl/Km8oE= github.com/klauspost/pgzip v1.2.5/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.3.0 
h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/labstack/echo/v4 v4.11.4 h1:vDZmA+qNeh1pd/cCkEicDMrjtrnMGQ1QFI9gWN1zGq8= -github.com/labstack/echo/v4 v4.11.4/go.mod h1:noh7EvLwqDsmh/X/HWKPUl1AjzJrhyptRyEbQJfxen8= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/labstack/echo-contrib v0.17.4 h1:g5mfsrJfJTKv+F5uNKCyrjLK7js+ZW6HTjg4FnDxxgk= +github.com/labstack/echo-contrib v0.17.4/go.mod h1:9O7ZPAHUeMGTOAfg80YqQduHzt0CzLak36PZRldYrZ0= +github.com/labstack/echo/v4 v4.15.0 h1:hoRTKWcnR5STXZFe9BmYun9AMTNeSbjHi2vtDuADJ24= +github.com/labstack/echo/v4 v4.15.0/go.mod h1:xmw1clThob0BSVRX1CRQkGQ/vjwcpOMjQZSZa9fKA/c= github.com/labstack/gommon v0.4.2 h1:F8qTUNXgG1+6WQmqoUWnz8WiEU60mXVVw0P4ht1WRA0= github.com/labstack/gommon v0.4.2/go.mod h1:QlUFxVM+SNXhDL/Z7YhocGIBYOiwB0mXm1+1bAPHPyU= github.com/leodido/go-urn v1.2.2 h1:7z68G0FCGvDk646jz1AelTYNYWrTNm0bEcFAo147wt4= github.com/leodido/go-urn v1.2.2/go.mod h1:kUaIbLZWttglzwNuG0pgsh5vuV6u2YcGBYz1hIPjtOQ= -github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= -github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= -github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/lufia/plan9stats 
v0.0.0-20240408141607-282e7b5d6b74 h1:1KuuSOy4ZNgW0KA2oYIngXVFhQcXxhLqCVK7cBcldkk= +github.com/lufia/plan9stats v0.0.0-20240408141607-282e7b5d6b74/go.mod h1:ilwx/Dta8jXAgpFYFvSWEMwxmbWXyiUHkd5FwyKhb5k= +github.com/magiconair/properties v1.8.10 h1:s31yESBquKXCV9a/ScB3ESkOjUYYv+X0rg8SYxI99mE= +github.com/magiconair/properties v1.8.10/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE= +github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-runewidth v0.0.13/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= github.com/mattn/goveralls v0.0.9/go.mod h1:FRbM1PS8oVsOe9JtdzAAXM+DsvDMMHcM1C7drGJD8HY= -github.com/mholt/archiver/v3 v3.5.1 h1:rDjOBX9JSF5BvoJGvjqK479aL70qh9DIpZCl+k7Clwo= -github.com/mholt/archiver/v3 v3.5.1/go.mod h1:e3dqJ7H78uzsRSEACH1joayhuSyhnonssnDhppzS1L4= -github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= -github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe h1:iruDEfMl2E6fbMZ9s0scYfZQ84/6SPL6zC8ACM2oIL0= +github.com/mholt/archiver/v4 v4.0.0-alpha.8 h1:tRGQuDVPh66WCOelqe6LIGh0gwmfwxUrSSDunscGsRM= +github.com/mholt/archiver/v4 v4.0.0-alpha.8/go.mod h1:5f7FUYGXdJWUjESffJaYR4R60VhnHxb2X3T1teMyv5A= +github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= +github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= +github.com/moby/go-archive v0.1.0 
h1:Kk/5rdW/g+H8NHdJW2gsXyZ7UnzvJNOy6VKJqueWdcQ= +github.com/moby/go-archive v0.1.0/go.mod h1:G9B+YoujNohJmrIYFBpSd54GTUB4lt9S+xVQvsJyFuo= +github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk= +github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc= +github.com/moby/sys/atomicwriter v0.1.0 h1:kw5D/EqkBwsBFi0ss9v1VG3wIkVhzGvLklJ+w3A14Sw= +github.com/moby/sys/atomicwriter v0.1.0/go.mod h1:Ul8oqv2ZMNHOceF643P6FKPXeCmYtlQMvpizfsSoaWs= +github.com/moby/sys/sequential v0.6.0 h1:qrx7XFUd/5DxtqcoH1h438hF5TmOvzC/lspjy7zgvCU= +github.com/moby/sys/sequential v0.6.0/go.mod h1:uyv8EUTrca5PnDsdMGXhZe6CCe8U/UiTWd+lL+7b/Ko= +github.com/moby/sys/user v0.4.0 h1:jhcMKit7SA80hivmFJcbB1vqmw//wU61Zdui2eQXuMs= +github.com/moby/sys/user v0.4.0/go.mod h1:bG+tYYYJgaMtRKgEmuueC0hJEAZWwtIbZTB+85uoHjs= +github.com/moby/sys/userns v0.1.0 h1:tVLXkFOxVu9A64/yh59slHVv9ahO9UIev4JZusOLG/g= +github.com/moby/sys/userns v0.1.0/go.mod h1:IHUYgu/kao6N8YZlp9Cf444ySSvCmDlmzUcYfDHOl28= +github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= +github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= +github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw= +github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= -github.com/nwaples/rardecode v1.1.0/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0= -github.com/nwaples/rardecode v1.1.3 h1:cWCaZwfM5H7nAD6PyEdcVnczzV8i/JtotnyW/dD9lEc= -github.com/nwaples/rardecode v1.1.3/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0= +github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE= +github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow= 
+github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= +github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= +github.com/nwaples/rardecode/v2 v2.2.0 h1:4ufPGHiNe1rYJxYfehALLjup4Ls3ck42CWwjKiOqu0A= +github.com/nwaples/rardecode/v2 v2.2.0/go.mod h1:7uz379lSxPe6j9nvzxUZ+n7mnJNgjsRNb6IbvGVHRmw= github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= +github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037 h1:G7ERwszslrBzRxj//JalHPu/3yz+De2J+4aLtSRlHiY= +github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037/go.mod h1:2bpvgLBZEtENV5scfDFEtB/5+1M4hkQhDQrccEJ/qGw= +github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90 h1:bQx3WeLcUWy+RletIKwUIt4x3t8n2SxavmoclizMb8c= +github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90/go.mod h1:y5+oSEHCPT/DGrS++Wc/479ERge0zTFxaF8PbGKcg2o= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= +github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= github.com/onsi/gomega v1.15.0/go.mod h1:cIuvLEne0aoVhAgh/O6ac0Op8WWw9H6eYCriF+tEHG0= github.com/onsi/gomega v1.18.1 
h1:M1GfJqGRrBrrGGsbxzV5dqM2U2ApXefZCQpkukxYRLE= +github.com/onsi/gomega v1.18.1/go.mod h1:0q+aL8jAiMXy9hbwj2mr5GziHiwhAIQpFmmtT5hitRs= +github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= +github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040= +github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M= github.com/oschwald/geoip2-golang v1.8.0 h1:KfjYB8ojCEn/QLqsDU0AzrJ3R5Qa9vFlx3z6SLNcKTs= github.com/oschwald/geoip2-golang v1.8.0/go.mod h1:R7bRvYjOeaoenAp9sKRS8GX5bJWcZ0laWO5+DauEktw= github.com/oschwald/maxminddb-golang v1.10.0 h1:Xp1u0ZhqkSuopaKmk1WwHtjF0H9Hd9181uj2MQ5Vndg= github.com/oschwald/maxminddb-golang v1.10.0/go.mod h1:Y2ELenReaLAZ0b400URyGwvYxHV1dLIxBuyOsyYjHK0= -github.com/pierrec/lz4/v4 v4.1.2/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= +github.com/perimeterx/marshmallow v1.1.5 h1:a2LALqQ1BlHM8PZblsDdidgv1mWi1DgC2UmX50IvK2s= +github.com/perimeterx/marshmallow v1.1.5/go.mod h1:dsXbUu8CRzfYP5a87xpp0xq9S3u0Vchtcl8we9tYaXw= github.com/pierrec/lz4/v4 v4.1.17 h1:kV4Ip+/hUBC+8T6+2EgburRtkE9ef4nbY3f4dFhGjMc= github.com/pierrec/lz4/v4 v4.1.17/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4= -github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/power-devops/perfstat 
v0.0.0-20240221224432-82ca36839d55 h1:o4JXh1EVt9k/+g42oCprj/FisM4qX9L3sZB3upGN2ZU= +github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= +github.com/prometheus/client_golang v1.22.0 h1:rb93p9lokFEsctTys46VnV1kLCDpVZ0a/Y92Vm0Zc6Q= +github.com/prometheus/client_golang v1.22.0/go.mod h1:R7ljNsLXhuQXYZYtw6GAE9AZg8Y7vEW5scdCXrWRXC0= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk= +github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE= +github.com/prometheus/common v0.63.0 h1:YR/EIY1o3mEFP/kZCD7iDMnLPlGyuU2Gb3HIcXnA98k= +github.com/prometheus/common v0.63.0/go.mod h1:VVFF/fBIoToEnWRVkYoXEkq3R3paCoxG9PXP74SnV18= +github.com/prometheus/procfs v0.16.1 h1:hZ15bTNuirocR6u0JZ6BAHHmwS1p8B4P6MRqxtzMyRg= +github.com/prometheus/procfs v0.16.1/go.mod h1:teAbpZRB1iIAJYREa1LsoWUXykVXA1KlTmWl8x/U+Is= github.com/redis/go-redis/v9 v9.0.3 h1:+7mmR26M0IvyLxGZUHxu4GiBkJkVDid0Un+j4ScYu4k= github.com/redis/go-redis/v9 v9.0.3/go.mod h1:WqMKv5vnQbRuZstUwxQI195wHy+t4PuXDOjzMvcuQHk= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs= github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= -github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8= +github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= +github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= github.com/russross/blackfriday/v2 
v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd/go.mod h1:hPqNNc0+uJM6H+SuU8sEs5K5IQeKccPqeSjfgcKGgPk= github.com/rwtodd/Go.Sed v0.0.0-20210816025313-55464686f9ef/go.mod h1:8AEUvGVi2uQ5b24BIhcr0GCcpd/RNAFWaN2CJFrWIIQ= github.com/sethvargo/go-envconfig v0.9.0 h1:Q6FQ6hVEeTECULvkJZakq3dZMeBQ3JUpcKMfPQbKMDE= github.com/sethvargo/go-envconfig v0.9.0/go.mod h1:Iz1Gy1Sf3T64TQlJSvee81qDhf7YIlt8GMUX6yyNFs0= github.com/shellhub-io/mongotest v0.0.0-20230928124937-e33b07010742 h1:sIFW1zdZvMTAvpHYOphDoWSh4tiGloK0El2GZni4E+U= github.com/shellhub-io/mongotest v0.0.0-20230928124937-e33b07010742/go.mod h1:6J6yfW5oIvAZ6VjxmV9KyFZyPFVM3B4V3Epbb+1c0oo= -github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= -github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/shirou/gopsutil/v4 v4.25.6 h1:kLysI2JsKorfaFPcYmcJqbzROzsBWEOAtw6A7dIfqXs= +github.com/shirou/gopsutil/v4 v4.25.6/go.mod h1:PfybzyydfZcN+JMMjkF6Zb8Mq1A/VcogFFg7hj50W9c= +github.com/sirupsen/logrus v1.9.4 h1:TsZE7l11zFCLZnZ+teH4Umoq5BhEIfIzfRDZ1Uzql2w= +github.com/sirupsen/logrus v1.9.4/go.mod h1:ftWc9WdOfJ0a92nsE2jF5u5ZwH8Bv2zdeOC42RjbV2g= github.com/spf13/cast v1.3.1 h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng= github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0= -github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho= -github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= -github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU= +github.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4= +github.com/spf13/pflag v1.0.9 h1:9exaQaMOCwffKiiiYk6/BndUBv+iRViNW+4lEMi0PvY= 
+github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/square/mongo-lock v0.0.0-20230808145049-cfcf499f6bf0 h1:wnVho7xObpxuF7Lr0146VZtfOLfbkXGcvzfFUw2LXuM= github.com/square/mongo-lock v0.0.0-20230808145049-cfcf499f6bf0/go.mod h1:bLPJcGVut+NBtZhrqY/jTnfluDrZeuIvf66VjuwU/eU= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= -github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= -github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= -github.com/tidwall/pretty v0.0.0-20190325153808-1166b9ac2b65/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= +github.com/stretchr/testify v1.11.1 
h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/testcontainers/testcontainers-go v0.40.0 h1:pSdJYLOVgLE8YdUY2FHQ1Fxu+aMnb6JfVz1mxk7OeMU= +github.com/testcontainers/testcontainers-go v0.40.0/go.mod h1:FSXV5KQtX2HAMlm7U3APNyLkkap35zNLxukw9oBi/MY= +github.com/testcontainers/testcontainers-go/modules/mongodb v0.40.0 h1:z/1qHeliTLDKNaJ7uOHOx1FjwghbcbYfga4dTFkF0hU= +github.com/testcontainers/testcontainers-go/modules/mongodb v0.40.0/go.mod h1:GaunAWwMXLtsMKG3xn2HYIBDbKddGArfcGsF2Aog81E= +github.com/testcontainers/testcontainers-go/modules/redis v0.32.0 h1:HW5Qo9qfLi5iwfS7cbXwG6qe8ybXGePcgGPEmVlVDlo= +github.com/testcontainers/testcontainers-go/modules/redis v0.32.0/go.mod h1:5kltdxVKZG0aP1iegeqKz4K8HHyP0wbkW5o84qLyMjY= +github.com/therootcompany/xz v1.0.1 h1:CmOtsn1CbtmyYiusbfmhmkpAAETj0wBIH6kCYaX+xzw= +github.com/therootcompany/xz v1.0.1/go.mod h1:3K3UH1yCKgBneZYhuQUvJ9HPD19UEXEI0BWbMn8qNMY= github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= +github.com/tklauser/go-sysconf v0.3.13 h1:GBUpcahXSpR2xN01jhkNAbTLRk2Yzgggk8IM08lq3r4= +github.com/tklauser/go-sysconf v0.3.13/go.mod h1:zwleP4Q4OehZHGn4CYZDipCgg9usW5IJePewFCGVEa0= +github.com/tklauser/numcpus v0.7.0 h1:yjuerZP127QG9m5Zh/mSO4wqurYil27tHrqwRoRjpr4= +github.com/tklauser/numcpus v0.7.0/go.mod h1:bb6dMVcj8A42tSE7i32fsIUCbQNllK5iDguyOZRUzAY= github.com/tkuchiki/go-timezone v0.2.2 h1:MdHR65KwgVTwWFQrota4SKzc4L5EfuH5SdZZGtk/P2Q= github.com/tkuchiki/go-timezone v0.2.2/go.mod h1:oFweWxYl35C/s7HMVZXiA19Jr9Y0qJHMaG/J2TES4LY= github.com/tkuchiki/parsetime v0.3.0 h1:cvblFQlPeAPJL8g6MgIGCHnnmHSZvluuY+hexoZCNqc= github.com/tkuchiki/parsetime v0.3.0/go.mod h1:OJkQmIrf5Ao7R+WYIdITPOfDVj8LmnHGCfQ8DTs3LCA= +github.com/ugorji/go/codec v1.2.7 h1:YPXUKf7fYbp/y8xloBqZOw2qaVggbfwMlI8WM3wZUJ0= +github.com/ugorji/go/codec v1.2.7/go.mod h1:WGN1fab3R1fzQlVQTkfxVtIBhWDRqOviHU95kRgeqEY= 
github.com/ulikunitz/xz v0.5.8/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= -github.com/ulikunitz/xz v0.5.9/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= -github.com/ulikunitz/xz v0.5.11 h1:kpFauv27b6ynzBNT/Xy+1k+fK4WswhN/6PN5WhFAGw8= -github.com/ulikunitz/xz v0.5.11/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= -github.com/undefinedlabs/go-mpatch v1.0.7 h1:943FMskd9oqfbZV0qRVKOUsXQhTLXL0bQTVbQSpzmBs= -github.com/undefinedlabs/go-mpatch v1.0.7/go.mod h1:TyJZDQ/5AgyN7FSLiBJ8RO9u2c6wbtRvK827b6AVqY4= +github.com/ulikunitz/xz v0.5.14 h1:uv/0Bq533iFdnMHZdRBTOlaNMdb1+ZxXIlHDZHIHcvg= +github.com/ulikunitz/xz v0.5.14/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/valyala/fasttemplate v1.2.2 h1:lxLXG0uE3Qnshl9QyaK6XJxMXlQZELvChBOCmQD0Loo= @@ -200,8 +403,10 @@ github.com/vmihailenco/msgpack/v5 v5.3.5 h1:5gO0H1iULLWGhs2H5tbAHIZTV8/cYafcFOr9 github.com/vmihailenco/msgpack/v5 v5.3.5/go.mod h1:7xyJ9e+0+9SaZT0Wt1RGleJXzli6Q/V5KbhBonMG9jc= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -github.com/xakep666/mongo-migrate v0.2.1 h1:pRK966a44ujuGMEl73MOzv4MajcH8Q6MWo+TBlxjhvs= -github.com/xakep666/mongo-migrate v0.2.1/go.mod h1:pVQysP+es2wX4TaeVd7zLkRZhKMcBqcC/KRyLms6Eyk= +github.com/woodsbury/decimal128 v1.3.0 h1:8pffMNWIlC0O5vbyHWFZAt5yWvWcrHA+3ovIIjVWss0= +github.com/woodsbury/decimal128 v1.3.0/go.mod h1:C5UTmyTjW3JftjUFzOVhC20BEQa2a4ZKOB5I6Zjb+ds= +github.com/xakep666/mongo-migrate v0.3.2 h1:qmDtIGiMRIwMvc84fOlsDoP+08S6NWLJDPqa4wPfQ1U= +github.com/xakep666/mongo-migrate v0.3.2/go.mod h1:onPlsF/AvU9UZjlyX3PiC5iAPHYJuejPPPqlOvsCGhM= github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= 
github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= github.com/xdg-go/scram v1.0.2/go.mod h1:1WAq6h33pAW+iRreB34OORO2Nf7qel3VV3fjBj+hCSs= @@ -210,104 +415,208 @@ github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3k github.com/xdg-go/stringprep v1.0.2/go.mod h1:8F9zXuvzgwmyT5DUm4GUfZGDdT3W+LCvS6+da4O5kxM= github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8= github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM= -github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I= -github.com/xdg/stringprep v1.0.0/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= -github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 h1:nIPpBwaJSVYIxUFsDv3M8ofmx9yWTog9BfvIu0q41lo= -github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8/go.mod h1:HUYIGzjTL3rfEspMxjDjgmT5uz5wzYJKVo23qUhYTos= -github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d h1:splanxYIlg+5LfHAM6xpdFEAYOk8iySO56hMFq6uLyA= github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= +github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 h1:ilQV1hzziu+LLM3zUTJ0trRztfwgjqKnBWNtSRkbmwM= +github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78/go.mod h1:aL8wCCfTfSfmXjznFBSZNN13rSJjlIOI1fUNAtF7rmI= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -go.mongodb.org/mongo-driver v1.0.2/go.mod h1:u7ryQJ+DOzQmeO7zB6MHyr8jkEQvC8vH7qLUO4lqsUM= +github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0= +github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= go.mongodb.org/mongo-driver v1.9.1/go.mod 
h1:0sQWfOeY63QTntERDJJ/0SuKK0T1uVSgKCuAROlKEPY= -go.mongodb.org/mongo-driver v1.13.1 h1:YIc7HTYsKndGK4RFzJ3covLz1byri52x0IoMB0Pt/vk= -go.mongodb.org/mongo-driver v1.13.1/go.mod h1:wcDf1JBCXy2mOW0bWHwO/IOYqdca1MPCwDtFu/Z9+eo= -go.uber.org/goleak v1.1.12 h1:gZAh5/EyT/HQwlpkCy6wTpqfH9H8Lz8zbm3dZh+OyzA= +go.mongodb.org/mongo-driver v1.17.7 h1:a9w+U3Vt67eYzcfq3k/OAv284/uUUkL0uP75VE5rCOU= +go.mongodb.org/mongo-driver v1.17.7/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ= +go.mongodb.org/mongo-driver/v2 v2.3.0 h1:sh55yOXA2vUjW1QYw/2tRlHSQViwDyPnW61AwpZ4rtU= +go.mongodb.org/mongo-driver/v2 v2.3.0/go.mod h1:jHeEDJHJq7tm6ZF45Issun9dbogjfnPySb1vXA7EeAI= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= +go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.51.0 h1:Xs2Ncz0gNihqu9iosIZ5SkBbWo5T8JhhLJFMQL1qmLI= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.51.0/go.mod h1:vy+2G/6NvVMpwGX/NyLqcC41fxepnuKHk16E6IZUcJc= +go.opentelemetry.io/otel v1.37.0 h1:9zhNfelUvx0KBfu/gb+ZgeAfAgtWrfHJZcAqFC228wQ= +go.opentelemetry.io/otel v1.37.0/go.mod h1:ehE/umFRLnuLa/vSccNq9oS1ErUlkkK71gMcN34UG8I= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.19.0 h1:Mne5On7VWdx7omSrSSZvM4Kw7cS7NQkOOmLcgscI51U= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.19.0/go.mod h1:IPtUMKL4O3tH5y+iXVyAXqpAwMuzC1IrxVS81rummfE= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.26.0 h1:1wp/gyxsuYtuE/JFxsQRtcCDtMrO2qMvlfXALU5wkzI= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.26.0/go.mod 
h1:gbTHmghkGgqxMomVQQMur1Nba4M0MQ8AYThXDUjsJ38= +go.opentelemetry.io/otel/metric v1.37.0 h1:mvwbQS5m0tbmqML4NqK+e3aDiO02vsf/WgbsdpcPoZE= +go.opentelemetry.io/otel/metric v1.37.0/go.mod h1:04wGrZurHYKOc+RKeye86GwKiTb9FKm1WHtO+4EVr2E= +go.opentelemetry.io/otel/sdk v1.26.0 h1:Y7bumHf5tAiDlRYFmGqetNcLaVUZmh4iYfmGxtmz7F8= +go.opentelemetry.io/otel/sdk v1.26.0/go.mod h1:0p8MXpqLeJ0pzcszQQN4F0S5FVjBLgypeGSngLsmirs= +go.opentelemetry.io/otel/trace v1.37.0 h1:HLdcFNbRQBE2imdSEgm/kwqmQj1Or1l/7bW6mxVK7z4= +go.opentelemetry.io/otel/trace v1.37.0/go.mod h1:TlgrlQ+PtQO5XFerSPUYG0JSgGyryXewPGyayAWSBS0= +go.opentelemetry.io/proto/otlp v1.0.0 h1:T0TX0tmXU8a3CbNXzEKGeU5mIVOdf0oykP+u2lIVU/I= +go.opentelemetry.io/proto/otlp v1.0.0/go.mod h1:Sy6pihPLfYHkr3NkUbEhGHFhINUSI/v80hjKIs5JXpM= go.uber.org/goleak v1.1.12/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= +go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= +go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= +go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= +go4.org v0.0.0-20200411211856-f5505b9728dd h1:BNJlw5kRTzdmyfh5U8F93HA2OwkP7ZGwA51eJ/0wKOU= +go4.org v0.0.0-20200411211856-f5505b9728dd/go.mod h1:CIiUVy99QCPfoE13bO4EZaz5GZMZXMSBGhxRdsvzbkg= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190513172903-22d7a77e9e5f/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto 
v0.0.0-20201216223049-8b5274cf687f/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc= -golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= +golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8= +golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod 
h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod 
h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= golang.org/x/net v0.0.0-20211029224645-99673261e6eb/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= 
-golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c= -golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U= +golang.org/x/net v0.48.0 h1:zyQRTTrjc33Lhh0fBgT/H3oZq9WuvRR5gPC70xpDiQU= +golang.org/x/net v0.48.0/go.mod h1:+ndRgGjkh8FGtu1w1FGbEC31if4VrNVMuKTgcAAnQRY= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201008141435-b3e1573b7520/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync 
v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o= -golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4= +golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys 
v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU= -golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ= +golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.16.0 h1:m+B6fahuftsE9qjo0VWp2FW0mB3MTJvR0BaMQrq0pmE= +golang.org/x/term v0.39.0 h1:RclSuaJf32jOqZz74CkPA9qFuVTX7vhLlpfj/IGWlqY= +golang.org/x/term v0.39.0/go.mod h1:yxzUCTP/U+FzoxfdKmLaA0RV1WgE0VY7hXBwKtY/4ww= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod 
h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= -golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= -golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= +golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= -golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= +golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI= +golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod 
h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190531172133-b3315ee88b7d/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= 
golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= @@ -316,6 +625,46 @@ golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod 
h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20230526203410-71b5a4ffd15e h1:Ao9GzfUMPH3zjVfzXG5rlWlk+Q8MXWKwWpwVQE1MXfw= +google.golang.org/genproto/googleapis/api v0.0.0-20240814211410-ddb44dafa142 h1:wKguEg1hsxI2/L3hUYrpo1RVi48K+uTyzKqprwLXsb8= +google.golang.org/genproto/googleapis/api v0.0.0-20240814211410-ddb44dafa142/go.mod h1:d6be+8HhtEtucleCbxpPW9PA9XwISACu8nvpPqF0BVo= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 h1:pPJltXNxVzT4pK9yD8vR9X75DaWYYmLGMsEvBfFQZzQ= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1/go.mod h1:UqMtugtsSgubUsoxbuAoiCXvqvErP7Gf0so0mK9tHxU= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod 
h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.72.0 h1:S7UkcVa60b5AAQTaO6ZKamFp1zMZSU0fGDK2WZLbBnM= +google.golang.org/grpc v1.72.0/go.mod h1:wH5Aktxcg25y1I3w7H69nHfXdOG3UiadoBtjh3izSDM= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -324,18 +673,17 @@ google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzi google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.29.1 h1:7QBf+IK2gx70Ap/hDsOmam3GE0v9HicjfEdAxE62UoM= -google.golang.org/protobuf v1.29.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= +google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 
v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= -gopkg.in/tomb.v2 v2.0.0-20161208151619-d5d1b5820637 h1:yiW+nvdHb9LVqSHQBXfZCieqV4fzYhNBql77zY0ykqs= -gopkg.in/tomb.v2 v2.0.0-20161208151619-d5d1b5820637/go.mod h1:BHsqpu/nsuzkT5BpiH1EMZPLyqSMM8JbIavyFACoFNk= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= @@ -344,3 +692,13 @@ gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gotest.tools/v3 v3.5.2 h1:7koQfIKdy+I8UTetycgUqXWSDwpgv193Ka+qRsmBY8Q= +gotest.tools/v3 v3.5.2/go.mod h1:LtdLGcnqToBH83WByAAi/wiwSFCArdFIUV/xxN4pcjA= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools 
v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/api/main.go b/api/main.go index 97f3e5d4d4b..af005156751 100644 --- a/api/main.go +++ b/api/main.go @@ -2,31 +2,67 @@ package main import ( "context" + "os" + "os/signal" + "syscall" "github.com/shellhub-io/shellhub/pkg/envs" "github.com/shellhub-io/shellhub/pkg/loglevel" - "github.com/sirupsen/logrus" + log "github.com/sirupsen/logrus" "github.com/spf13/cobra" ) -func init() { - loglevel.SetLogLevel() -} - func main() { + loglevel.UseEnvs() + rootCmd := &cobra.Command{Use: "api"} + rootCmd.AddCommand(&cobra.Command{ + Use: "server", + RunE: func(cmd *cobra.Command, _ []string) error { + env, err := envs.ParseWithPrefix[env]("API_") + if err != nil { + log.WithError(err). + Error("Failed to load environment variables") - rootCmd.AddCommand(serverCmd) + return err + } - // Populates configuration based on environment variables prefixed with 'API_'. - cfg, err := envs.ParseWithPrefix[config]("API_") - if err != nil { - logrus.WithError(err).Fatal("Failed to load environment variables") - } + server := &Server{env: env} + + if err := server.Setup(cmd.Context()); err != nil { + log.WithError(err). + Error("failed to setup the server") + + return err + } + + sigs := make(chan os.Signal, 1) + signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM) + + go func() { + sig := <-sigs + log.WithField("signal", sig). + Info("shutting down the server") + + server.Shutdown() + os.Exit(0) + }() + + if err := server.Start(); err != nil { + log.WithError(err). 
+ Error("failed too start the server") + + return err + } + + return nil + }, + }) - ctx := context.WithValue(context.TODO(), "cfg", cfg) //nolint:revive + if err := rootCmd.ExecuteContext(context.Background()); err != nil { + log.WithError(err). + Error("failed to execute command") - if err := rootCmd.ExecuteContext(ctx); err != nil { - logrus.Fatal(err) + os.Exit(1) } } diff --git a/api/pkg/dbtest/dbserver.go b/api/pkg/dbtest/dbserver.go index 7734ede6e3c..1fda5807507 100644 --- a/api/pkg/dbtest/dbserver.go +++ b/api/pkg/dbtest/dbserver.go @@ -1,297 +1,96 @@ package dbtest -// mgo - MongoDB driver for Go - -// Copyright (c) 2010-2013 - Gustavo Niemeyer - -// All rights reserved. - -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are met: - -// 1. Redistributions of source code must retain the above copyright notice, this -// list of conditions and the following disclaimer. -// 2. Redistributions in binary form must reproduce the above copyright notice, -// this list of conditions and the following disclaimer in the documentation -// and/or other materials provided with the distribution. - -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -// DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR -// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEV - import ( - "bytes" "context" - "fmt" - "net" "os" - "os/exec" - "runtime" - "strconv" - "syscall" - "time" - "github.com/shellhub-io/shellhub/pkg/dockerutils" - log "github.com/sirupsen/logrus" - "go.mongodb.org/mongo-driver/bson" - "go.mongodb.org/mongo-driver/mongo" - "go.mongodb.org/mongo-driver/mongo/options" - - "gopkg.in/tomb.v2" + "github.com/shellhub-io/mongotest" + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/modules/mongodb" + "github.com/testcontainers/testcontainers-go/network" ) -func init() { - cmd := exec.Command("/bin/sh", "-c", "docker info") - var out bytes.Buffer - cmd.Stdout = &out - cmd.Stderr = &out - if err := cmd.Run(); err != nil { - fmt.Fprintf(os.Stderr, "---- Failed to initialize dbtest:\n") - fmt.Fprint(os.Stderr, out.String()) - panic("Docker is not installed or is not running properly") - } -} - -// DBServer controls a MongoDB server process to be used within test suites. -// -// The test server is started when Client is called the first time and should -// remain running for the duration of all tests, with the Wipe method being -// called between tests (before each of them) to clear stored data. After all tests -// are done, the Stop method should be called to stop the test server. 
-type DBServer struct { - Ctx context.Context - timeout time.Duration - client *mongo.Client - output bytes.Buffer - server *exec.Cmd - Host string - network string - tomb tomb.Tomb -} - -func (dbs *DBServer) SetTimeout(timeout int) { - dbs.timeout = time.Duration(timeout) -} - -func (dbs *DBServer) start() { - if dbs.server != nil { - log.Panic("DBServer already started") - } - l, err := net.Listen("tcp", "127.0.0.1:0") - if err != nil { - log.WithError(err).Panic("unable to listen on a local address") - } - - addr, ok := l.Addr().(*net.TCPAddr) - if !ok { - log.Panic("Type assertion failed") - } - - l.Close() - - dbs.network = "host" // Use same network as docker host - dbs.Host = addr.String() +// Server represents a MongoDB test server instance. +type Server struct { + tContainer *mongodb.MongoDBContainer // Container is the MongoDB container instance. - if dockerutils.IsRunningInDocker() { - containerID, err := dockerutils.CurrentContainerID() - if err != nil { - log.Panic("failed to get current container id: " + err.Error()) - } - - if containerID != "" { - // If tests are running in a docker container use the same container network - dbs.network = fmt.Sprintf("container:%s", containerID) - } + Container struct { + ConnectionString string + ExposedPort string + Database string } - dbs.tomb = tomb.Tomb{} - args := []string{ - "run", "--rm", fmt.Sprintf("--net=%s", dbs.network), "mongo:4.4.8", - "--bind_ip", "127.0.0.1", - "--port", strconv.Itoa(addr.Port), - "--replSet", "rs0", + Fixtures struct { + Root string // Root is the absolute path to seek fixture files. + PreInsertFuncs []mongotest.PreInsertFunc // PreInsertFuncs is a list of functions to run before inserting data. } +} - dbs.server = exec.Command("docker", args...) 
- dbs.server.SysProcAttr = &syscall.SysProcAttr{Pdeathsig: syscall.SIGTERM} - dbs.server.Stdout = &dbs.output - dbs.server.Stderr = &dbs.output - err = dbs.server.Start() +func (srv *Server) configure(ctx context.Context) error { + ports, err := srv.tContainer.Ports(ctx) if err != nil { - // print error to facilitate troubleshooting as the panic will be caught in a panic handler - fmt.Fprintf(os.Stderr, "mongod failed to start: %v\n", err) - log.WithError(err).Warning("mongod failed to start") - log.Panic(err) + return err } - dbs.tomb.Go(dbs.monitor) - dbs.Wipe() -} - -func (dbs *DBServer) monitor() error { - if _, err := dbs.server.Process.Wait(); err != nil { - log.WithError(err).Warning("mongod container process wait error") + // Index 0 is the IPV4 addr + srv.Container.ExposedPort = ports["27017/tcp"][0].HostPort + cIP, err := srv.tContainer.ContainerIP(ctx) + if err != nil { return err } + srv.Container.ConnectionString = "mongodb://" + cIP + ":27017" - if dbs.tomb.Alive() { - // Present some debugging information. - log.Error("---- mongod container died unexpectedly ----") - fmt.Fprintf(os.Stderr, "%s", dbs.output.Bytes()) - log.Error("---- mongod containers running right now ----") - - cmd := exec.Command("/bin/sh", "-c", "docker ps --filter ancestor=mongo") - cmd.Stdout = os.Stderr - cmd.Stderr = os.Stderr - if err := cmd.Run(); err != nil { - log.WithError(err).Warning("Failed to list running mongo containers") - - return err - } - - log.Error("----------------------------------------") - log.Panic("mongod container died unexpectedly") + if srv.Container.Database == "" { + srv.Container.Database = "test" } return nil } -// Stop stops the test server process, if it is running. -// -// It's okay to call Stop multiple times. After the test server is -// stopped it cannot be restarted. -// -// All database clients must be closed before or while the Stop method -// is running. Otherwise Stop will panic after a timeout informing that -// there is a client leak. 
-func (dbs *DBServer) Stop() { - if dbs.client != nil { - if err := dbs.client.Disconnect(dbs.Ctx); err != nil { - log.Panic("fail to disconnect the database") - } - - dbs.client = nil - } - - if dbs.server != nil { //nolint:nestif - dbs.tomb.Kill(nil) - - // Windows doesn't support Interrupt - if runtime.GOOS == "windows" { - if err := dbs.server.Process.Signal(os.Kill); err != nil { - log.Panic("fail to send os.Kill to the server") - } - } else { - if err := dbs.server.Process.Signal(os.Interrupt); err != nil { - log.Panic("fail to send os.Interrupt to the server") - } - } - - select { - case <-dbs.tomb.Dead(): - case <-time.After(5 * time.Second): - log.Panic("timeout waiting for mongod process to die") - } - dbs.server = nil - } -} - -// Client returns a new client to the server. The returned client -// must be disconnected after the tests are finished. -// -// The first call to Client will start the DBServer. -func (dbs *DBServer) Client() *mongo.Client { - if dbs.server == nil { - dbs.start() - } - - if dbs.client != nil { - return dbs.client - } - +// Up starts a new MongoDB container, configures the database to receive fixtures, +// and returns a DBServer instance. +func (srv *Server) Up(ctx context.Context) error { var err error - if dbs.timeout == 0 { - dbs.timeout = 8 * time.Second - } - - // Wait for mongodb to be available - ticker := time.NewTicker(time.Second) -ticker: - for { - select { - case <-time.After(dbs.timeout): - log.Panic("mongodb connection timeout") - case <-ticker.C: - if _, err := net.Dial("tcp", dbs.Host); err != nil { - continue - } + opts := []testcontainers.ContainerCustomizer{mongodb.WithReplicaSet("rs")} - break ticker - } + // If TESTCONTAINERS_DOCKER_NETWORK env var is set (e.g., when running inside Docker), + // attach the container to that network. This is useful for Docker-in-Docker scenarios. 
+ if networkName := os.Getenv("TESTCONTAINERS_DOCKER_NETWORK"); networkName != "" { + opts = append(opts, network.WithNetworkName([]string{"mongo"}, networkName)) } - args := []string{ - "run", "--rm", fmt.Sprintf("--net=%s", dbs.network), "mongo:4.4.8", - "mongo", - "--host", dbs.Host, - "--eval", "rs.initiate()", - "--quiet", - } - - // Initiates mongodb replica set before anything else - cmd := exec.Command("docker", args...) - out, err := cmd.CombinedOutput() + srv.tContainer, err = mongodb.Run(ctx, "mongo:4.4.8", opts...) if err != nil { - fmt.Fprintf(os.Stderr, "%s\n", out) - log.Panic(err) + return err } - clientOptions := options.Client().ApplyURI("mongodb://" + dbs.Host + "/test") - dbs.Ctx = context.Background() - - dbs.client, err = mongo.Connect(dbs.Ctx, clientOptions) - if err != nil { - log.Panic(err) - } - if dbs.client == nil { - log.Panic("cant connect") + if err := srv.configure(ctx); err != nil { + return err } - // Verify that the server is accepting connections - if err := dbs.client.Ping(dbs.Ctx, nil); err != nil { - log.Panic(err) - } + mongotest.Configure(mongotest.Config{ + URL: srv.Container.ConnectionString, + Database: srv.Container.Database, + FixtureRootDir: srv.Fixtures.Root, + PreInsertFuncs: srv.Fixtures.PreInsertFuncs, + FixtureFormat: mongotest.FixtureFormatJSON, + }) - return dbs.client + return nil } -func (dbs *DBServer) CTX() context.Context { - return dbs.Ctx +// Down gracefully terminates the MongoDB container. +func (srv *Server) Down(ctx context.Context) error { + return srv.tContainer.Terminate(ctx) } -// Wipe drops all created databases and their data. 
-func (dbs *DBServer) Wipe() { - if dbs.server == nil || dbs.client == nil { - return - } - client := dbs.Client() - names, err := client.ListDatabaseNames(dbs.Ctx, bson.M{}) - if err != nil { - log.Panic(err) - } - for _, name := range names { - switch name { - case "admin", "local", "config": - default: - err = dbs.client.Database(name).Drop(dbs.Ctx) - if err != nil { - log.Panic(err) - } - } - } +// Apply applies specified fixtures to the database. +func (*Server) Apply(fixtures ...string) error { + return mongotest.UseFixture(fixtures...) +} + +// Reset resets the entire database, removing all data. +func (*Server) Reset() error { + return mongotest.DropDatabase() } diff --git a/api/pkg/dbtest/docs.go b/api/pkg/dbtest/docs.go new file mode 100644 index 00000000000..d43500429e8 --- /dev/null +++ b/api/pkg/dbtest/docs.go @@ -0,0 +1,37 @@ +// Package dbtest provides utilities for setting up MongoDB test environments, +// including container initialization, fixture management, and database reset functionality. +// +// Usage: +// +// ctx := context.Background() +// +// // Initialize a new MongoDB test server +// srv := &dbtest.Server{} +// srv.Database = "test" +// srv.Fixtures.Root = "/path/to/fixtures" +// srv.Fixtures.PreInsertFuncs = []mongotest.PreInsertFunc{ +// mongotest.SimpleConvertObjID("users", "_id"), +// // [...] 
+// } +// +// // Start the MongoDB container and configure fixtures +// if err := srv.Up(ctx); err != nil { +// log.Fatalf("Error starting MongoDB container: %v", err) +// } +// +// defer func() { +// if err := srv.Down(ctx); err != nil { +// log.Fatalf("Error stopping MongoDB container: %v", err) +// } +// }() +// +// // Apply fixtures +// if err := srv.Apply("fixture1.json", "fixture2.json"); err != nil { +// log.Fatalf("Error applying fixtures: %v", err) +// } +// +// // Reset the database +// if err := srv.Reset(); err != nil { +// log.Fatalf("Error resetting database: %v", err) +// } +package dbtest diff --git a/api/pkg/echo/handlers/binder_test.go b/api/pkg/echo/handlers/binder_test.go new file mode 100644 index 00000000000..ebc04fa47ec --- /dev/null +++ b/api/pkg/echo/handlers/binder_test.go @@ -0,0 +1,34 @@ +package handlers + +import ( + "testing" +) + +func TestBinder(t *testing.T) { + cases := []struct { + description string + expected error + }{ + { + description: "succeeds to bind json body", + expected: nil, + }, + { + description: "succeeds to bind path parameters", + expected: nil, + }, + { + description: "succeeds to bind query parameters", + expected: nil, + }, + { + description: "succeeds to bind query parameters with special characters", + expected: nil, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(_ *testing.T) { + }) + } +} diff --git a/api/pkg/echo/handlers/errors.go b/api/pkg/echo/handlers/errors.go index e2f0e8ea5f1..8872c09903c 100644 --- a/api/pkg/echo/handlers/errors.go +++ b/api/pkg/echo/handlers/errors.go @@ -7,7 +7,6 @@ import ( "github.com/getsentry/sentry-go" "github.com/labstack/echo/v4" "github.com/shellhub-io/shellhub/api/pkg/echo/handlers/pkg/converter" - "github.com/shellhub-io/shellhub/api/pkg/guard" routes "github.com/shellhub-io/shellhub/api/routes/errors" "github.com/shellhub-io/shellhub/api/services" "github.com/shellhub-io/shellhub/api/store" @@ -66,8 +65,6 @@ func NewErrors(reporter 
*sentry.Client) func(error, echo.Context) { var status int switch e.Layer { - case guard.ErrLayer: - status = http.StatusForbidden case routes.ErrLayer: status = converter.FromErrRouteToHTTPStatus(e.Code) case services.ErrLayer: diff --git a/api/pkg/echo/handlers/pkg/converter/converter.go b/api/pkg/echo/handlers/pkg/converter/converter.go index 7a54482fb2d..1d373dd3a0c 100644 --- a/api/pkg/echo/handlers/pkg/converter/converter.go +++ b/api/pkg/echo/handlers/pkg/converter/converter.go @@ -10,6 +10,8 @@ import ( // FromErrServiceToHTTPStatus converts a service error code to http status. func FromErrServiceToHTTPStatus(code int) int { switch code { + case services.ErrCodeCreated: + return http.StatusCreated case services.ErrCodeNotFound: return http.StatusNotFound case services.ErrCodeInvalid: diff --git a/api/pkg/fixtures/data/active_sessions.json b/api/pkg/fixtures/data/active_sessions.json deleted file mode 100644 index f957a14ecca..00000000000 --- a/api/pkg/fixtures/data/active_sessions.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "active_sessions": { - "650a1c1b3b3bb3a0f8e9bf43": { - "last_seen": "2023-01-01T12:00:00.000Z", - "uid": "a3b0431f5df6a7827945d2e34872a5c781452bc36de42f8b1297fd9ecb012f68" - } - } -} diff --git a/api/pkg/fixtures/data/announcements.json b/api/pkg/fixtures/data/announcements.json deleted file mode 100644 index 3bf72e4a355..00000000000 --- a/api/pkg/fixtures/data/announcements.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "announcements": { - "65087e99c5381e09e4978750": { - "date": "2023-01-01T12:00:00.000Z", - "title": "title-0", - "content": "content-0", - "uuid": "00000000-0000-4000-0000-000000000000" - }, - "656f782a8145ed4a51f157fe": { - "date": "2023-01-02T12:00:00.000Z", - "title": "title-1", - "content": "content-1", - "uuid": "00000000-0000-4001-0000-000000000000" - }, - "656f78089dbccf33fd973ae7": { - "date": "2023-01-03T12:00:00.000Z", - "title": "title-2", - "content": "content-2", - "uuid": "00000000-0000-4002-0000-000000000000" - }, - 
"657b1297ab10b82c93029216": { - "date": "2023-01-04T12:00:00.000Z", - "title": "title-3", - "content": "content-3", - "uuid": "00000000-0000-4003-0000-000000000000" - } - } -} diff --git a/api/pkg/fixtures/data/connected_devices.json b/api/pkg/fixtures/data/connected_devices.json deleted file mode 100644 index b0ad381a781..00000000000 --- a/api/pkg/fixtures/data/connected_devices.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "connected_devices": { - "6500c8f086353a36732d544b": { - "last_seen": "2023-01-01T12:00:00.000Z", - "status": "accepted", - "tenant_id": "00000000-0000-4000-0000-000000000000", - "uid": "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c" - } - } -} diff --git a/api/pkg/fixtures/data/licenses.json b/api/pkg/fixtures/data/licenses.json deleted file mode 100644 index 97a99076266..00000000000 --- a/api/pkg/fixtures/data/licenses.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "licenses": { - "6508bcd25f331f4e6d4a245c": { - "created_at": "2023-01-01T12:00:00.000Z", - "rawdata": "test" - } - } -} diff --git a/api/pkg/fixtures/data/namespaces.json b/api/pkg/fixtures/data/namespaces.json deleted file mode 100644 index 0b16663b42f..00000000000 --- a/api/pkg/fixtures/data/namespaces.json +++ /dev/null @@ -1,76 +0,0 @@ -{ - "namespaces": { - "6500c8f086353a36732d544a": { - "created_at": "2023-01-01T12:00:00.000Z", - "max_devices": -1, - "members": [ - { - "id": "507f1f77bcf86cd799439011", - "role": "owner" - }, - { - "id": "6509e169ae6144b2f56bf288", - "role": "observer" - } - ], - "name": "namespace-1", - "owner": "507f1f77bcf86cd799439011", - "settings": { - "session_record": true - }, - "tenant_id": "00000000-0000-4000-0000-000000000000" - }, - "e5b45d9a2c0a4f8dbb7f4e5d": { - "created_at": "2023-01-01T12:00:00.000Z", - "max_devices": 10, - "members": [ - { - "id": "6509e169ae6144b2f56bf288", - "role": "owner" - }, - { - "id": "907f1f77bcf86cd799439022", - "role": "operator" - } - ], - "name": "namespace-2", - "owner": "6509e169ae6144b2f56bf288", - 
"settings": { - "session_record": false - }, - "tenant_id": "00000000-0000-4001-0000-000000000000" - }, - "3c7f09a5b46c4a63a9ccc071": { - "created_at": "2023-01-01T12:00:00.000Z", - "max_devices": 3, - "members": [ - { - "id": "657b0e3bff780d625f74e49a", - "role": "owner" - } - ], - "name": "namespace-3", - "owner": "657b0e3bff780d625f74e49a", - "settings": { - "session_record": true - }, - "tenant_id": "00000000-0000-4002-0000-000000000000" - }, - "6577271b9f5a02f3bc8f5400": { - "created_at": "2023-01-01T12:00:00.000Z", - "max_devices": -1, - "members": [ - { - "id": "6577267d8752d05270a4c07d", - "role": "owner" - } - ], - "name": "namespace-4", - "owner": "6577267d8752d05270a4c07d", - "settings": { - "session_record": true - }, - "tenant_id": "00000000-0000-4003-0000-000000000000" - } - } -} diff --git a/api/pkg/fixtures/data/public_keys.json b/api/pkg/fixtures/data/public_keys.json deleted file mode 100644 index 306b88fde1d..00000000000 --- a/api/pkg/fixtures/data/public_keys.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "public_keys": { - "65088c97a3efce71bf6e1f32": { - "created_at": "2023-01-01T12:00:00.000Z", - "data": "test", - "filter": { - "hostname": ".*", - "tags": ["tag-1"] - }, - "fingerprint": "fingerprint", - "name": "public_key", - "tenant_id": "00000000-0000-4000-0000-000000000000" - } - } -} diff --git a/api/pkg/fixtures/data/users.json b/api/pkg/fixtures/data/users.json deleted file mode 100644 index 77bf663665e..00000000000 --- a/api/pkg/fixtures/data/users.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "users": { - "507f1f77bcf86cd799439011": { - "confirmed": true, - "created_at": "2023-01-01T12:00:00.000Z", - "last_login": "2023-01-01T12:00:00.000Z", - "email": "john.doe@test.com", - "email_marketing": true, - "max_namespaces": 0, - "name": "john doe", - "password": "fcf730b6d95236ecd3c9fc2d92d7b6b2bb061514961aec041d6c7a7192f592e4", - "username": "john_doe" - }, - "608f32a2c7351f001f6475e0": { - "confirmed": true, - "created_at": 
"2023-01-02T12:00:00.000Z", - "last_login": "2023-01-02T12:00:00.000Z", - "email": "jane.smith@test.com", - "email_marketing": true, - "max_namespaces": 3, - "name": "Jane Smith", - "password": "a0b8c29f4c8d57e542f5e81d35ebe801fd27f569f116fe670e8962d798512a1d", - "username": "jane_smith" - }, - "709f45b5e812c1002f3a67e7": { - "confirmed": true, - "created_at": "2023-01-03T12:00:00.000Z", - "last_login": "2023-01-03T12:00:00.000Z", - "email": "bob.johnson@test.com", - "email_marketing": true, - "max_namespaces": 10, - "name": "Bob Johnson", - "password": "5f3b3956a1a150b73e6b27e674f27d7aeb01ab1a40c179c3e1aa6026a36655a2", - "username": "bob_johnson" - }, - "80fdcea1d7299c002f3a67e8": { - "confirmed": false, - "created_at": "2023-01-04T12:00:00.000Z", - "last_login": null, - "email": "alex.rodriguez@test.com", - "email_marketing": false, - "max_namespaces": 3, - "name": "Alex Rodriguez", - "password": "c5093eb98678c7a3324825b84c6b67c1127b93786482ddbbd356e67e29b2763f", - "username": "alex_rodriguez" - } - } -} diff --git a/api/pkg/fixtures/doc.go b/api/pkg/fixtures/doc.go deleted file mode 100644 index 9d34c565ca0..00000000000 --- a/api/pkg/fixtures/doc.go +++ /dev/null @@ -1,8 +0,0 @@ -// Package fixtures provides utilities for setting up MongoDB fixtures. Each fixture -// is a YAML file inside the `data` subdirectory. Each file contains a list of predefined -// data for the specified collection, which has the same name as the file. -// -// You can prepare the database to receive the fixtures using the `Init` function. The package -// also includes `Apply` to apply the provided fixtures and `Teardown` to reset all the applied -// fixtures. All available fixtures are accessible via constants provided by the package. 
-package fixtures diff --git a/api/pkg/fixtures/fixtures.go b/api/pkg/fixtures/fixtures.go deleted file mode 100644 index 6605fe0661f..00000000000 --- a/api/pkg/fixtures/fixtures.go +++ /dev/null @@ -1,51 +0,0 @@ -package fixtures - -import ( - "path/filepath" - "runtime" - - "github.com/shellhub-io/mongotest" -) - -const ( - FixtureAnnouncements = "announcements" // Check "fixtures.data.announcements" for fixture info - FixtureConnectedDevices = "connected_devices" // Check "fixtures.data.connected_devices" for fixture info - FixtureDevices = "devices" // Check "fixtures.data.devices" for fixture info - FixtureSessions = "sessions" // Check "fixtures.data.sessions" for fixture info - FixtureActiveSessions = "active_sessions" // Check "fixtures.data.active_sessions" for fixture info - FixtureRecordedSessions = "recorded_sessions" // Check "fixtures.data.recorded_sessions" for fixture info - FixtureFirewallRules = "firewall_rules" // Check "fixtures.data.firewall_rules" for fixture info - FixturePublicKeys = "public_keys" // Check "fixtures.data.public_keys" for fixture info - FixturePrivateKeys = "private_keys" // Check "fixtures.data.private_keys" for fixture info - FixtureLicenses = "licenses" // Check "fixtures.data.licenses" for fixture info - FixtureUsers = "users" // Check "fixtures.data.users" for fixture iefo - FixtureNamespaces = "namespaces" // Check "fixtures.data.namespaces" for fixture info - FixtureRecoveryTokens = "recovery_tokens" // Check "fixtures.data.recovery_tokens" for fixture info -) - -// Init configures the mongotest for the provided host's database. It is necessary -// before using any fixtures and panics if any errors arise. 
-func Init(host, database string) { - _, file, _, ok := runtime.Caller(0) - if !ok { - panic("failed to retrieve the fixtures path at runtime") - } - - mongotest.Configure(mongotest.Config{ - URL: "mongodb://" + host, - Database: database, - FixtureRootDir: filepath.Join(filepath.Dir(file), "data"), - FixtureFormat: mongotest.FixtureFormatJSON, - PreInsertFuncs: setupPreInsertFuncs(), - }) -} - -// Apply applies 'n' fixtures in the database. -func Apply(fixtures ...string) error { - return mongotest.UseFixture(fixtures...) -} - -// Teardown resets all applied fixtures. -func Teardown() error { - return mongotest.DropDatabase() -} diff --git a/api/pkg/fixtures/pre_insert.go b/api/pkg/fixtures/pre_insert.go deleted file mode 100644 index 05b012d1b07..00000000000 --- a/api/pkg/fixtures/pre_insert.go +++ /dev/null @@ -1,119 +0,0 @@ -package fixtures - -import "github.com/shellhub-io/mongotest" - -func setupPreInsertFuncs() []mongotest.PreInsertFunc { - fns := make([]mongotest.PreInsertFunc, 0) - - fns = append(fns, preInsertUsers()...) - fns = append(fns, preInsertRecoveryTokens()...) - fns = append(fns, preInsertAnnouncements()...) - fns = append(fns, preInsertPublicKeys()...) - fns = append(fns, preInsertPrivateKeys()...) - fns = append(fns, preInsertLicenses()...) - fns = append(fns, preInsertNamespaces()...) - fns = append(fns, preInsertDevices()...) - fns = append(fns, preInsertConnectedDevices()...) - fns = append(fns, preInsertFirewallRules()...) - fns = append(fns, preInsertSessions()...) - fns = append(fns, preInsertActiveSessions()...) - fns = append(fns, preInsertRecordedSessions()...) 
- - return fns -} - -func preInsertUsers() []mongotest.PreInsertFunc { - return []mongotest.PreInsertFunc{ - mongotest.SimpleConvertObjID("users", "_id"), - mongotest.SimpleConvertTime("users", "created_at"), - mongotest.SimpleConvertTime("users", "last_login"), - } -} - -func preInsertRecoveryTokens() []mongotest.PreInsertFunc { - return []mongotest.PreInsertFunc{ - mongotest.SimpleConvertObjID("recovery_tokens", "_id"), - mongotest.SimpleConvertTime("recovery_tokens", "created_at"), - } -} - -func preInsertAnnouncements() []mongotest.PreInsertFunc { - return []mongotest.PreInsertFunc{ - mongotest.SimpleConvertObjID("announcements", "_id"), - mongotest.SimpleConvertTime("announcements", "date"), - } -} - -func preInsertPublicKeys() []mongotest.PreInsertFunc { - return []mongotest.PreInsertFunc{ - mongotest.SimpleConvertObjID("public_keys", "_id"), - mongotest.SimpleConvertBytes("public_keys", "data"), - mongotest.SimpleConvertTime("public_keys", "created_at"), - } -} - -func preInsertPrivateKeys() []mongotest.PreInsertFunc { - return []mongotest.PreInsertFunc{ - mongotest.SimpleConvertObjID("private_keys", "_id"), - mongotest.SimpleConvertBytes("private_keys", "data"), - mongotest.SimpleConvertTime("private_keys", "created_at"), - } -} - -func preInsertLicenses() []mongotest.PreInsertFunc { - return []mongotest.PreInsertFunc{ - mongotest.SimpleConvertObjID("licenses", "_id"), - mongotest.SimpleConvertBytes("licenses", "rawdata"), - mongotest.SimpleConvertTime("licenses", "created_at"), - } -} - -func preInsertNamespaces() []mongotest.PreInsertFunc { - return []mongotest.PreInsertFunc{ - mongotest.SimpleConvertObjID("namespaces", "_id"), - mongotest.SimpleConvertTime("namespaces", "created_at"), - } -} - -func preInsertDevices() []mongotest.PreInsertFunc { - return []mongotest.PreInsertFunc{ - mongotest.SimpleConvertObjID("devices", "_id"), - mongotest.SimpleConvertTime("devices", "created_at"), - mongotest.SimpleConvertTime("devices", "last_seen"), - 
mongotest.SimpleConvertTime("devices", "status_updated_at"), - } -} - -func preInsertConnectedDevices() []mongotest.PreInsertFunc { - return []mongotest.PreInsertFunc{ - mongotest.SimpleConvertTime("connected_devices", "last_seen"), - } -} - -func preInsertFirewallRules() []mongotest.PreInsertFunc { - return []mongotest.PreInsertFunc{ - mongotest.SimpleConvertObjID("firewall_rules", "_id"), - } -} - -func preInsertSessions() []mongotest.PreInsertFunc { - return []mongotest.PreInsertFunc{ - mongotest.SimpleConvertObjID("sessions", "_id"), - mongotest.SimpleConvertTime("sessions", "started_at"), - mongotest.SimpleConvertTime("sessions", "last_seen"), - } -} - -func preInsertActiveSessions() []mongotest.PreInsertFunc { - return []mongotest.PreInsertFunc{ - mongotest.SimpleConvertObjID("active_sessions", "_id"), - mongotest.SimpleConvertTime("active_sessions", "last_seen"), - } -} - -func preInsertRecordedSessions() []mongotest.PreInsertFunc { - return []mongotest.PreInsertFunc{ - mongotest.SimpleConvertObjID("recorded_sessions", "_id"), - mongotest.SimpleConvertTime("recorded_sessions", "time"), - } -} diff --git a/api/pkg/gateway/context.go b/api/pkg/gateway/context.go index 6b4fc67ce19..7dfe12d2900 100644 --- a/api/pkg/gateway/context.go +++ b/api/pkg/gateway/context.go @@ -4,6 +4,7 @@ import ( "context" "github.com/labstack/echo/v4" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" "github.com/shellhub-io/shellhub/pkg/models" ) @@ -20,10 +21,10 @@ func (c *Context) Service() interface{} { return c.service } -// Role returns the user's namespace role got from JWT through gateway. -// Notice: it can be empty if the user has no namespaces. -func (c *Context) Role() string { - return c.Request().Header.Get("X-Role") +// Role returns the user's namespace role got from JWT through gateway. It is +// empty if the user has no namespaces. 
+func (c *Context) Role() authorizer.Role { + return authorizer.RoleFromString(c.Request().Header.Get("X-Role")) } // Tenant returns the namespace's tenant got from JWT through gateway. @@ -90,12 +91,7 @@ func (c *Context) GetUsername() (string, bool) { return "", false } -// GetRole returns the user's namespace role got from JWT through gateway. -func (c *Context) GetRole() (string, bool) { - role := c.Request().Header.Get("X-Role") - if role != "" { - return role, true - } - - return "", false +// IsAdmin returns whether the user is an admin or not. +func (c *Context) IsAdmin() bool { + return c.Request().Header.Get("X-Admin") == "true" } diff --git a/api/pkg/gateway/context_test.go b/api/pkg/gateway/context_test.go new file mode 100644 index 00000000000..fb844281302 --- /dev/null +++ b/api/pkg/gateway/context_test.go @@ -0,0 +1,361 @@ +package gateway + +import ( + "net/http" + "net/http/httptest" + "testing" + + "github.com/labstack/echo/v4" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/stretchr/testify/require" +) + +func TestRole(t *testing.T) { + cases := []struct { + description string + headers map[string]string + expected authorizer.Role + }{ + { + description: "verify if given value returns from header", + headers: map[string]string{ + "X-Role": authorizer.RoleOwner.String(), + }, + expected: authorizer.Role("owner"), + }, { + description: "validate empty headers, for fail function", + headers: map[string]string{}, + expected: authorizer.RoleInvalid, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + e := echo.New() + req := httptest.NewRequest(http.MethodPost, "/", nil) + + for k, v := range tc.headers { + req.Header.Set(k, v) + } + + rec := httptest.NewRecorder() + ctx := e.NewContext(req, rec) + + ctxNew := Context{ + nil, + ctx, + } + + role := ctxNew.Role() + + require.Equal(t, tc.expected, role) + }) + } +} + +func TestTenant(t *testing.T) { + 
cases := []struct { + description string + headers map[string]string + expected *models.Tenant + }{ + { + description: "verify if given value returns from header", + headers: map[string]string{ + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + }, + expected: &models.Tenant{ + ID: "00000000-0000-4000-0000-000000000000", + }, + }, { + description: "validate empty headers, for fail function", + headers: map[string]string{}, + expected: nil, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + e := echo.New() + req := httptest.NewRequest(http.MethodPost, "/", nil) + + req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) + + for k, v := range tc.headers { + req.Header.Set(k, v) + } + + rec := httptest.NewRecorder() + ctx := e.NewContext(req, rec) + + ctxNew := Context{ + nil, + ctx, + } + + tenant := ctxNew.Tenant() + + require.Equal(t, tc.expected, tenant) + }) + } +} + +func TestUsername(t *testing.T) { + cases := []struct { + description string + headers map[string]string + expected *models.Username + }{ + { + description: "verify if given value returns from header", + headers: map[string]string{ + "X-Username": "someone", + }, + expected: &models.Username{ + ID: "someone", + }, + }, { + description: "validate empty headers, for fail function", + headers: map[string]string{}, + expected: nil, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + e := echo.New() + req := httptest.NewRequest(http.MethodPost, "/", nil) + + req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) + + for k, v := range tc.headers { + req.Header.Set(k, v) + } + + rec := httptest.NewRecorder() + ctx := e.NewContext(req, rec) + + ctxNew := Context{ + nil, + ctx, + } + + user := ctxNew.Username() + + require.Equal(t, tc.expected, user) + }) + } +} + +func TestID(t *testing.T) { + cases := []struct { + description string + headers map[string]string + expected *models.ID + }{ + { + description: 
"verify if given value returns from header", + headers: map[string]string{ + "X-ID": "507f191e810c19729de860ea", + }, + expected: &models.ID{ + ID: "507f191e810c19729de860ea", + }, + }, { + description: "validate empty headers, for fail function", + headers: map[string]string{}, + expected: nil, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + e := echo.New() + req := httptest.NewRequest(http.MethodPost, "/", nil) + + req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) + + for k, v := range tc.headers { + req.Header.Set(k, v) + } + + rec := httptest.NewRecorder() + ctx := e.NewContext(req, rec) + + ctxNew := Context{ + nil, + ctx, + } + + id := ctxNew.ID() + + require.Equal(t, tc.expected, id) + }) + } +} + +func TestGetID(t *testing.T) { + type Expected struct { + id string + ok bool + } + cases := []struct { + description string + headers map[string]string + expected Expected + }{ + { + description: "verify if given value returns from header", + headers: map[string]string{ + "X-ID": "507f191e810c19729de860ea", + }, + expected: Expected{ + id: "507f191e810c19729de860ea", + ok: true, + }, + }, { + description: "validate empty headers, for fail function", + headers: map[string]string{}, + expected: Expected{ + id: "", + ok: false, + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + e := echo.New() + req := httptest.NewRequest(http.MethodPost, "/", nil) + + req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) + + for k, v := range tc.headers { + req.Header.Set(k, v) + } + + rec := httptest.NewRecorder() + ctx := e.NewContext(req, rec) + + ctxNew := Context{ + nil, + ctx, + } + + idNew, validate := ctxNew.GetID() + + require.Equal(t, tc.expected, Expected{idNew, validate}) + }) + } +} + +func TestGetTennat(t *testing.T) { + type Expected struct { + user string + ok bool + } + cases := []struct { + description string + headers map[string]string + expected Expected + 
}{ + { + description: "verify if given value returns from header", + headers: map[string]string{ + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + }, + expected: Expected{ + user: "00000000-0000-4000-0000-000000000000", + ok: true, + }, + }, { + description: "validate empty headers, for fail function", + headers: map[string]string{}, + expected: Expected{ + user: "", + ok: false, + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + e := echo.New() + req := httptest.NewRequest(http.MethodPost, "/", nil) + + req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) + + for k, v := range tc.headers { + req.Header.Set(k, v) + } + + rec := httptest.NewRecorder() + ctx := e.NewContext(req, rec) + + ctxNew := Context{ + nil, + ctx, + } + + userNew, validate := ctxNew.GetTennat() + + require.Equal(t, tc.expected, Expected{userNew, validate}) + }) + } +} + +func TestGetUsername(t *testing.T) { + type Expected struct { + user string + ok bool + } + cases := []struct { + description string + headers map[string]string + expected Expected + }{ + { + description: "verify if given value returns from header", + headers: map[string]string{ + "X-Username": "someone", + }, + expected: Expected{ + user: "someone", + ok: true, + }, + }, { + description: "validate empty headers, for fail function", + headers: map[string]string{}, + expected: Expected{ + user: "", + ok: false, + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + e := echo.New() + req := httptest.NewRequest(http.MethodPost, "/", nil) + + req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) + + for k, v := range tc.headers { + req.Header.Set(k, v) + } + + rec := httptest.NewRecorder() + ctx := e.NewContext(req, rec) + + ctxNew := Context{ + nil, + ctx, + } + + userNew, validate := ctxNew.GetUsername() + + require.Equal(t, tc.expected, Expected{userNew, validate}) + }) + } +} diff --git a/api/pkg/gateway/gateway_test.go 
b/api/pkg/gateway/gateway_test.go new file mode 100644 index 00000000000..ceeb0040d36 --- /dev/null +++ b/api/pkg/gateway/gateway_test.go @@ -0,0 +1,159 @@ +package gateway + +import ( + "context" + "net/http" + "net/http/httptest" + "testing" + + "github.com/labstack/echo/v4" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/stretchr/testify/require" +) + +func TestTenantFromContext(t *testing.T) { + cases := []struct { + description string + headers map[string]string + expected *models.Tenant + }{ + { + description: "verify if given value returns from header", + headers: map[string]string{ + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + }, + expected: &models.Tenant{ + ID: "00000000-0000-4000-0000-000000000000", + }, + }, { + description: "validate empty tenant string, for ID behavior", + headers: map[string]string{}, + expected: nil, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + e := echo.New() + req := httptest.NewRequest(http.MethodPost, "/", nil) + + req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) + + for k, v := range tc.headers { + req.Header.Set(k, v) + } + + rec := httptest.NewRecorder() + ctx := e.NewContext(req, rec) + + ctxNew := Context{ + nil, + ctx, + } + + ctxArg := context.WithValue(context.TODO(), "ctx", &ctxNew) // nolint:revive + + tenant := TenantFromContext(ctxArg) + + require.Equal(t, tc.expected, tenant) + }) + } +} + +func TestUsernameFromContext(t *testing.T) { + cases := []struct { + description string + headers map[string]string + expected *models.Username + }{ + { + description: "verify if given value returns from header", + headers: map[string]string{ + "X-Username": "someone", + }, + expected: &models.Username{ + ID: "someone", + }, + }, { + description: "validate empty username string, for ID behavior", + headers: map[string]string{}, + expected: nil, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + e := echo.New() + 
req := httptest.NewRequest(http.MethodPost, "/", nil) + + req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) + + for k, v := range tc.headers { + req.Header.Set(k, v) + } + + rec := httptest.NewRecorder() + ctx := e.NewContext(req, rec) + + ctxNew := Context{ + nil, + ctx, + } + + ctxArg := context.WithValue(context.TODO(), "ctx", &ctxNew) // nolint:revive + + user := UsernameFromContext(ctxArg) + + require.Equal(t, tc.expected, user) + }) + } +} + +func TestIDFromContext(t *testing.T) { + cases := []struct { + description string + headers map[string]string + expected *models.ID + }{ + { + description: "verify if given value returns from header", + headers: map[string]string{ + "X-ID": "507f191e810c19729de860ea", + }, + expected: &models.ID{ + ID: "507f191e810c19729de860ea", + }, + }, { + description: "validate empty ID string, for ID behavior", + headers: map[string]string{}, + expected: nil, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + e := echo.New() + req := httptest.NewRequest(http.MethodPost, "/", nil) + + req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) + + for k, v := range tc.headers { + req.Header.Set(k, v) + } + + rec := httptest.NewRecorder() + ctx := e.NewContext(req, rec) + + ctxNew := Context{ + nil, + ctx, + } + + ctxArg := context.WithValue(context.TODO(), "ctx", &ctxNew) // nolint:revive + + id := IDFromContext(ctxArg) + + require.Equal(t, tc.expected, id) + }) + } +} diff --git a/api/pkg/guard/actions.go b/api/pkg/guard/actions.go deleted file mode 100644 index 1bbd6accc19..00000000000 --- a/api/pkg/guard/actions.go +++ /dev/null @@ -1,92 +0,0 @@ -package guard - -type Action int - -// AllActions is a struct to act like an Enum and facilitate to indicate the action used in the service. 
-type AllActions struct { - Device DeviceActions - Session SessionActions - Firewall FirewallActions - PublicKey PublicKeyActions - Namespace NamespaceActions - Billing BillingActions -} - -type DeviceActions struct { - Accept, Reject, Update, Remove, Connect, Rename, CreateTag, UpdateTag, RemoveTag, RenameTag, DeleteTag int -} - -type SessionActions struct { - Play, Close, Remove, Details int -} - -type FirewallActions struct { - Create, Edit, Remove, AddTag, UpdateTag, RemoveTag int -} - -type PublicKeyActions struct { - Create, Edit, Remove, AddTag, RemoveTag, UpdateTag int -} - -type NamespaceActions struct { - Rename, AddMember, RemoveMember, EditMember, EnableSessionRecord, Delete int -} - -type BillingActions struct { - CreateCustomer, ChooseDevices, AddPaymentMethod, UpdatePaymentMethod, RemovePaymentMethod, CancelSubscription, CreateSubscription, GetSubscription int -} - -// Actions has all available and allowed actions. -// You should use it to get the code's action. -var Actions = AllActions{ - Device: DeviceActions{ - Accept: DeviceAccept, - Reject: DeviceReject, - Update: DeviceUpdate, - Remove: DeviceRemove, - Connect: DeviceConnect, - Rename: DeviceRename, - CreateTag: DeviceCreateTag, - UpdateTag: DeviceUpdateTag, - RemoveTag: DeviceRemoveTag, - RenameTag: DeviceRenameTag, - DeleteTag: DeviceDeleteTag, - }, - Session: SessionActions{ - Play: SessionPlay, - Close: SessionClose, - Remove: SessionRemove, - Details: SessionDetails, - }, - Firewall: FirewallActions{ - Create: FirewallCreate, - Edit: FirewallEdit, - Remove: FirewallRemove, - }, - PublicKey: PublicKeyActions{ - Create: PublicKeyCreate, - Edit: PublicKeyEdit, - Remove: PublicKeyRemove, - AddTag: PublicKeyAddTag, - RemoveTag: PublicKeyRemoveTag, - UpdateTag: PublicKeyUpdateTag, - }, - Namespace: NamespaceActions{ - Rename: NamespaceRename, - AddMember: NamespaceAddMember, - RemoveMember: NamespaceRemoveMember, - EditMember: NamespaceEditMember, - EnableSessionRecord: 
NamespaceEnableSessionRecord, - Delete: NamespaceDelete, - }, - Billing: BillingActions{ - CreateCustomer: BillingCreateCustomer, - ChooseDevices: BillingChooseDevices, - AddPaymentMethod: BillingAddPaymentMethod, - UpdatePaymentMethod: BillingUpdatePaymentMethod, - RemovePaymentMethod: BillingRemovePaymentMethod, - CancelSubscription: BillingCancelSubscription, - CreateSubscription: BillingCreateSubscription, - GetSubscription: BillingGetSubscription, - }, -} diff --git a/api/pkg/guard/deprecated.go b/api/pkg/guard/deprecated.go deleted file mode 100644 index c958d1b6463..00000000000 --- a/api/pkg/guard/deprecated.go +++ /dev/null @@ -1,11 +0,0 @@ -package guard - -import "github.com/shellhub-io/shellhub/pkg/models" - -// Deprecated, use namespace.FindMember() instead. -// -// CheckMember checks if a models.User's ID is a models.Namespace's member. A models.User is a member if its ID is in -// the models.Namespace's members list. -func CheckMember(namespace *models.Namespace, id string) (*models.Member, bool) { - return namespace.FindMember(id) -} diff --git a/api/pkg/guard/errors.go b/api/pkg/guard/errors.go deleted file mode 100644 index 596155afe78..00000000000 --- a/api/pkg/guard/errors.go +++ /dev/null @@ -1,13 +0,0 @@ -package guard - -import "github.com/shellhub-io/shellhub/pkg/errors" - -// ErrLayer is an error level. Each error defined at this level, is container to it. -// ErrLayer is the errors' level for guard's error. -var ErrLayer = "guard" - -// ErrCodeForbidden is the error code when the access to a resource is forbidden. -const ErrCodeForbidden = iota + 1 - -// ErrForbidden is used to indicate that access to a resource is forbidden. 
-var ErrForbidden = errors.New("access forbidden", ErrLayer, ErrCodeForbidden) diff --git a/api/pkg/guard/guard.go b/api/pkg/guard/guard.go deleted file mode 100644 index dc10b622b22..00000000000 --- a/api/pkg/guard/guard.go +++ /dev/null @@ -1,114 +0,0 @@ -// Package guard is a helper to work around permissions on ShellHub API. -package guard - -import ( - "github.com/shellhub-io/shellhub/pkg/models" -) - -const ( - RoleObserver = "observer" - RoleOperator = "operator" - RoleAdministrator = "administrator" - RoleOwner = "owner" -) - -// RoleInvalidCode is a role code for invalid role. -const RoleInvalidCode = -1 - -const ( - // RoleObserverCode is a role code for observer. - RoleObserverCode = iota + 1 - // RoleOperatorCode is a role code for operator. - RoleOperatorCode - // RoleAdministratorCode is a role code for administrator. - RoleAdministratorCode - // RoleOwnerCode is a role code for owner. - RoleOwnerCode -) - -// Roles maps all roles to its code. -var Roles = map[string]int{ - RoleObserver: RoleObserverCode, - RoleOperator: RoleOperatorCode, - RoleAdministrator: RoleAdministratorCode, - RoleOwner: RoleOwnerCode, -} - -// RolePermissions maps roles to its Permissions. It is used to check if a models.Member has permission to do something. -var RolePermissions = map[string]Permissions{ - RoleObserver: observerPermissions, - RoleOperator: operatorPermissions, - RoleAdministrator: adminPermissions, - RoleOwner: ownerPermissions, -} - -// GetRoleCode converts a models.Member's role string to a role code. If the role is not found in Roles, it returns RoleInvalidCode. -func GetRoleCode(role string) int { - code, ok := Roles[role] - if !ok { - // return RoleInvalidCode when member's role is not valid. - return RoleInvalidCode - } - - return code -} - -// CheckRole checks if a models.Member's role from a models.Namespace can act over the other. Active is the member's role -// from who is acting, and passive is the member who is receiving. 
Active and passive roles must be members of the -// same models.Namespace. -// -// If active or passive is an invalid member, a member with a role no mapped, it returns false. If active and passive are -// equal, it returns false too. -// -// The valid roles are: RoleObserver, RoleOperator, RoleAdmin or RoleOwner. -func CheckRole(active, passive string) bool { - first := GetRoleCode(active) - second := GetRoleCode(passive) - - if first == RoleInvalidCode || second == RoleInvalidCode { - return false - } - - if first == second { - return false - } - - return first > second -} - -// EvaluatePermission checks if a models.Namespace's member has the role that allows an action. Each role has a list of -// allowed actions. -// -// Role is the member's role from who is acting, Action is the action that is being performed and callback is a function -// to be called if the action is allowed. -func EvaluatePermission(role string, action int, callback func() error) error { - check := func(action int, permissions Permissions) bool { - for _, permission := range permissions { - if permission == action { - return true - } - } - - return false - } - - permission, ok := RolePermissions[role] - if !ok { - return ErrForbidden - } - - if !check(action, permission) { - return ErrForbidden - } - - return callback() -} - -func EvaluateNamespace(namespace *models.Namespace, userID string, action int, callback func() error) error { - member, ok := namespace.FindMember(userID) - if !ok { - return ErrForbidden - } - - return EvaluatePermission(member.Role, action, callback) -} diff --git a/api/pkg/guard/guard_test.go b/api/pkg/guard/guard_test.go deleted file mode 100644 index 31745318f5c..00000000000 --- a/api/pkg/guard/guard_test.go +++ /dev/null @@ -1,508 +0,0 @@ -package guard - -import ( - "errors" - "fmt" - "testing" - - "github.com/shellhub-io/shellhub/api/store/mocks" - "github.com/shellhub-io/shellhub/pkg/models" - "github.com/stretchr/testify/assert" -) - -func TestEvaluateRole(t 
*testing.T) { - cases := []struct { - name string - exec func(t *testing.T) - }{ - { - name: "Fail when the first role is not great than the second one", - exec: func(t *testing.T) { - t.Helper() - assert.False(t, CheckRole(RoleAdministrator, RoleOwner)) - }, - }, - { - name: "Fail when a role is not valid", - exec: func(t *testing.T) { - t.Helper() - assert.False(t, CheckRole("invalidRole", RoleOperator)) - }, - }, - { - name: "Fail when both roles are equals", - exec: func(t *testing.T) { - t.Helper() - assert.False(t, CheckRole(RoleOperator, RoleOperator)) - }, - }, - { - name: "Success when the first role is great than the second one", - exec: func(t *testing.T) { - t.Helper() - assert.True(t, CheckRole(RoleAdministrator, RoleOperator)) - }, - }, - } - - for _, test := range cases { - t.Run(test.name, test.exec) - } -} - -func TestEvaluatePermission(t *testing.T) { - cases := []struct { - name string - exec func(t *testing.T) - }{ - { - name: "Fails when member's role has no permission", - exec: func(t *testing.T) { - t.Helper() - - role := RoleObserver - action := Actions.Firewall.Create - assert.Error(t, EvaluatePermission(role, action, nil)) - }, - }, - { - name: "Success when member's role has permission", - exec: func(t *testing.T) { - t.Helper() - - role := RoleOwner - action := Actions.Firewall.Create - assert.NoError(t, EvaluatePermission(role, action, func() error { - return nil - })) - }, - }, - } - - for _, test := range cases { - t.Run(test.name, test.exec) - } -} - -func TestEvaluateSubject(t *testing.T) { - mock := &mocks.Store{} - - memberOperator := models.Member{ - ID: "memberOperatorID", - Username: "memberOperatorUsername", - Role: RoleOperator, - } - - memberAdministrator := models.Member{ - ID: "memberAdministratorID", - Username: "memberAdministratorUsername", - Role: RoleAdministrator, - } - - memberOwner := models.Member{ - ID: "memberOwnerID", - Username: "memberOwnerUsername", - Role: RoleOwner, - } - - passiveRoleOperator := 
RoleOperator - passiveRoleObserver := RoleObserver - passiveRoleAdministrator := RoleAdministrator - passiveRoleOwner := RoleOwner - - cases := []struct { - description string - memberActive models.Member - rolePassive string - expected bool - }{ - { - description: "CheckRole successes when active user is a operator and passive role is observer", - memberActive: memberOperator, - rolePassive: passiveRoleObserver, - expected: true, - }, - { - description: "CheckRole fails when active user is a operator and passive role is operator", - memberActive: memberOperator, - rolePassive: passiveRoleOperator, - expected: false, - }, - { - description: "CheckRole fails when active user is a operator and passive role is administrator", - memberActive: memberOperator, - rolePassive: passiveRoleAdministrator, - expected: false, - }, - { - description: "CheckRole successes when active user is a administrator and passive role is observer", - memberActive: memberAdministrator, - rolePassive: passiveRoleObserver, - expected: true, - }, - { - description: "CheckRole success when active user is a administrator and passive role is operator", - memberActive: memberAdministrator, - rolePassive: passiveRoleOperator, - expected: true, - }, - { - description: "CheckRole fails when active user is a administrator and passive role is administrator", - memberActive: memberAdministrator, - rolePassive: passiveRoleAdministrator, - expected: false, - }, - { - description: "CheckRole fails when active user is a administrator and passive role is owner", - memberActive: memberAdministrator, - rolePassive: passiveRoleOwner, - expected: false, - }, - { - description: "CheckRole fails when active user is a owner and passive role is observer", - memberActive: memberOwner, - rolePassive: passiveRoleObserver, - expected: true, - }, - { - description: "CheckRole fails when active user is a owner and passive role is operator", - memberActive: memberOwner, - rolePassive: passiveRoleObserver, - expected: true, 
- }, - { - description: "CheckRole fails when active user is a owner and passive role is administrator", - memberActive: memberOwner, - rolePassive: passiveRoleAdministrator, - expected: true, - }, - { - description: "CheckRole fails when active user is a owner and passive role is owner", - memberActive: memberOwner, - rolePassive: passiveRoleOwner, - expected: false, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - ok := CheckRole(tc.memberActive.Role, tc.rolePassive) - assert.Equal(t, tc.expected, ok) - }) - } - - mock.AssertExpectations(t) -} - -func TestEvaluateNamespace(t *testing.T) { - userOwner := &models.User{ - ID: "userOwnerID", - UserData: models.UserData{ - Name: "userOwner", - Email: "userOwner@userOwner.com", - Username: "usernameOwner", - }, - } - - userObserver := &models.User{ - ID: "userObserverID", - UserData: models.UserData{ - Name: "userObserver", - Email: "userObserver@userObserver.com", - Username: "usernameObserver", - }, - } - userOperator := &models.User{ - ID: "userOperatorID", - UserData: models.UserData{ - Name: "userOperator", - Email: "userOperator@userOperator.com", - Username: "usernameOperator", - }, - } - - userAdministrator := &models.User{ - ID: "userAdministratorID", - UserData: models.UserData{ - Name: "userAdministrator", - Email: "userAdministrator@userAdministrator.com", - Username: "usernameAdministrator", - }, - } - - namespace := &models.Namespace{ - Name: "namespace", - Owner: userOwner.ID, - TenantID: "tenantID", - Members: []models.Member{ - { - ID: userOwner.ID, - Role: RoleOwner, - }, - { - ID: userObserver.ID, - Role: RoleObserver, - }, - { - ID: userOperator.ID, - Role: RoleOperator, - }, - { - ID: userAdministrator.ID, - Role: RoleAdministrator, - }, - }, - } - - cases := []struct { - description string - id string - namespace *models.Namespace - expected bool - }{ - { - description: "Fails when user is not inside the namespace", - id: "invalidUserID", - namespace: 
namespace, - expected: false, - }, - { - description: "Success find the user inside the namespace 1", - id: userObserver.ID, - namespace: namespace, - expected: true, - }, - { - description: "Success find the user inside the namespace 2", - id: userOperator.ID, - namespace: namespace, - expected: true, - }, - { - description: "Success find the user inside the namespace 3", - id: userAdministrator.ID, - namespace: namespace, - expected: true, - }, - { - description: "Success find the user inside the namespace 4", - id: userOwner.ID, - namespace: namespace, - expected: true, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - _, ok := CheckMember(tc.namespace, tc.id) - - assert.Equal(t, tc.expected, ok) - }) - } -} - -func TestCheckPermission(t *testing.T) { - mock := &mocks.Store{} - - cases := []struct { - description string - role string - actions []int - requiredMocks func() - expected bool - }{ - { - description: "CheckPermission success when user is the observer", - role: RoleObserver, - actions: []int{ - Actions.Device.Connect, - - Actions.Session.Details, - }, - requiredMocks: func() { - }, - expected: true, - }, - { - description: "CheckPermission success when user is the operator", - role: RoleOperator, - actions: []int{ - Actions.Device.Accept, - Actions.Device.Reject, - Actions.Device.Connect, - Actions.Device.Rename, - Actions.Device.Update, - - Actions.Device.CreateTag, - Actions.Device.UpdateTag, - Actions.Device.RemoveTag, - Actions.Device.RenameTag, - Actions.Device.DeleteTag, - - Actions.Session.Details, - }, - requiredMocks: func() { - }, - expected: true, - }, - { - description: "CheckPermission success when user is the administrator", - role: RoleAdministrator, - actions: []int{ - Actions.Device.Accept, - Actions.Device.Reject, - Actions.Device.Remove, - Actions.Device.Connect, - Actions.Device.Rename, - Actions.Device.Update, - - Actions.Device.CreateTag, - Actions.Device.UpdateTag, - Actions.Device.RemoveTag, 
- Actions.Device.RenameTag, - Actions.Device.DeleteTag, - - Actions.Session.Play, - Actions.Session.Close, - Actions.Session.Remove, - Actions.Session.Details, - - Actions.Firewall.Create, - Actions.Firewall.Edit, - Actions.Firewall.Remove, - - Actions.PublicKey.Create, - Actions.PublicKey.Edit, - Actions.PublicKey.Remove, - - Actions.Namespace.Rename, - Actions.Namespace.AddMember, - Actions.Namespace.RemoveMember, - Actions.Namespace.EditMember, - Actions.Namespace.EnableSessionRecord, - }, - requiredMocks: func() { - }, - expected: true, - }, - { - description: "CheckPermission success when user is the owner", - role: RoleOwner, - actions: []int{ - Actions.Device.Accept, - Actions.Device.Reject, - Actions.Device.Remove, - Actions.Device.Connect, - Actions.Device.Rename, - Actions.Device.Update, - - Actions.Device.CreateTag, - Actions.Device.UpdateTag, - Actions.Device.RemoveTag, - Actions.Device.RenameTag, - Actions.Device.DeleteTag, - - Actions.Session.Play, - Actions.Session.Close, - Actions.Session.Remove, - Actions.Session.Details, - - Actions.Firewall.Create, - Actions.Firewall.Edit, - Actions.Firewall.Remove, - - Actions.PublicKey.Create, - Actions.PublicKey.Edit, - Actions.PublicKey.Remove, - - Actions.Namespace.Rename, - Actions.Namespace.AddMember, - Actions.Namespace.RemoveMember, - Actions.Namespace.EditMember, - Actions.Namespace.EnableSessionRecord, - Actions.Namespace.Delete, - - Actions.Billing.AddPaymentMethod, - Actions.Billing.UpdatePaymentMethod, - Actions.Billing.RemovePaymentMethod, - Actions.Billing.ChooseDevices, - Actions.Billing.CancelSubscription, - Actions.Billing.CreateSubscription, - Actions.Billing.GetSubscription, - }, - requiredMocks: func() { - }, - expected: true, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - tc.requiredMocks() - for _, action := range tc.actions { - assert.NoError(t, EvaluatePermission(tc.role, action, func() error { - return nil - })) - } - }) - } - - 
mock.AssertExpectations(t) -} - -func ExampleCheckRole_observer_and_observer() { - // If members have the same role, they cannot act over each other. - active := RoleObserver - passive := RoleObserver - fmt.Println(CheckRole(active, passive)) - // Output: false -} - -func ExampleCheckRole_operator_and_observer() { - // If active member has a great roles, it can act over passive one. - active := RoleOperator - passive := RoleObserver - fmt.Println(CheckRole(active, passive)) - // Output: true -} - -func ExampleCheckRole_owner_and_observer() { - // If active member is owner, it can act over everyone. - active := RoleOwner - passive := RoleObserver - fmt.Println(CheckRole(active, passive)) - // Output: true -} - -func ExampleEvaluatePermission_callback() { - // RoleObserver can connect to device. - err := EvaluatePermission(RoleObserver, Actions.Device.Connect, func() error { - return errors.New("something went wrong") - }) - fmt.Println(err) - // Output: something went wrong -} - -func ExampleEvaluatePermission_no_callback() { - // RoleObserver cannot accept a device, so Forbidden is returned from EvaluatePermission. - err := EvaluatePermission(RoleObserver, Actions.Device.Accept, func() error { - // As RoleObserver has no permission to accept a device, this function will never be called. 
- return errors.New("something went wrong") - }) - fmt.Println(err) - // Output: access forbidden -} - -func ExampleGetRoleCode() { - fmt.Println(GetRoleCode(RoleObserver)) - fmt.Println(GetRoleCode(RoleOperator)) - fmt.Println(GetRoleCode(RoleAdministrator)) - fmt.Println(GetRoleCode("")) - fmt.Println(GetRoleCode("developer")) - // Output: - // 1 - // 2 - // 3 - // -1 - // -1 -} diff --git a/api/pkg/guard/permissions.go b/api/pkg/guard/permissions.go deleted file mode 100644 index 17b89c790c1..00000000000 --- a/api/pkg/guard/permissions.go +++ /dev/null @@ -1,176 +0,0 @@ -package guard - -type Permissions []int - -const ( - DeviceAccept = iota + 1 - DeviceReject - DeviceUpdate - DeviceRemove - DeviceConnect - DeviceRename - DeviceDetails - - DeviceCreateTag - DeviceUpdateTag - DeviceRemoveTag - DeviceRenameTag - DeviceDeleteTag - - SessionPlay - SessionClose - SessionRemove - SessionDetails - - FirewallCreate - FirewallEdit - FirewallRemove - - FirewallAddTag - FirewallRemoveTag - FirewallUpdateTag - - PublicKeyCreate - PublicKeyEdit - PublicKeyRemove - - PublicKeyAddTag - PublicKeyRemoveTag - PublicKeyUpdateTag - - NamespaceRename - NamespaceAddMember - NamespaceRemoveMember - NamespaceEditMember - NamespaceEnableSessionRecord - NamespaceDelete - - BillingCreateCustomer - BillingChooseDevices - BillingAddPaymentMethod - BillingUpdatePaymentMethod - BillingRemovePaymentMethod - BillingCancelSubscription - BillingCreateSubscription - BillingGetPaymentMethod - BillingGetSubscription -) - -var observerPermissions = Permissions{ - DeviceConnect, - DeviceDetails, - SessionDetails, -} - -var operatorPermissions = Permissions{ - DeviceAccept, - DeviceReject, - DeviceConnect, - DeviceRename, - DeviceDetails, - DeviceUpdate, - - DeviceCreateTag, - DeviceUpdateTag, - DeviceRemoveTag, - DeviceRenameTag, - DeviceDeleteTag, - - SessionDetails, -} - -var adminPermissions = Permissions{ - DeviceAccept, - DeviceReject, - DeviceRemove, - DeviceConnect, - DeviceRename, - 
DeviceDetails, - DeviceUpdate, - - DeviceCreateTag, - DeviceUpdateTag, - DeviceRemoveTag, - DeviceRenameTag, - DeviceDeleteTag, - - DeviceUpdate, - - SessionPlay, - SessionClose, - SessionRemove, - SessionDetails, - - FirewallCreate, - FirewallEdit, - FirewallRemove, - FirewallAddTag, - FirewallRemoveTag, - FirewallUpdateTag, - - PublicKeyCreate, - PublicKeyEdit, - PublicKeyRemove, - PublicKeyAddTag, - PublicKeyRemoveTag, - PublicKeyUpdateTag, - - NamespaceRename, - NamespaceAddMember, - NamespaceRemoveMember, - NamespaceEditMember, - NamespaceEnableSessionRecord, -} - -var ownerPermissions = Permissions{ - DeviceAccept, - DeviceReject, - DeviceRemove, - DeviceConnect, - DeviceRename, - DeviceDetails, - DeviceUpdate, - - DeviceCreateTag, - DeviceUpdateTag, - DeviceRemoveTag, - DeviceRenameTag, - DeviceDeleteTag, - - DeviceUpdate, - - SessionPlay, - SessionClose, - SessionRemove, - SessionDetails, - - FirewallCreate, - FirewallEdit, - FirewallRemove, - FirewallAddTag, - FirewallRemoveTag, - FirewallUpdateTag, - - PublicKeyCreate, - PublicKeyEdit, - PublicKeyRemove, - PublicKeyAddTag, - PublicKeyRemoveTag, - PublicKeyUpdateTag, - - NamespaceRename, - NamespaceAddMember, - NamespaceRemoveMember, - NamespaceEditMember, - NamespaceEnableSessionRecord, - NamespaceDelete, - - BillingCreateCustomer, - BillingChooseDevices, - BillingAddPaymentMethod, - BillingUpdatePaymentMethod, - BillingRemovePaymentMethod, - BillingCancelSubscription, - BillingCreateSubscription, - BillingGetSubscription, -} diff --git a/api/pkg/openapi/openapi.go b/api/pkg/openapi/openapi.go new file mode 100644 index 00000000000..d98d719bfb3 --- /dev/null +++ b/api/pkg/openapi/openapi.go @@ -0,0 +1,203 @@ +package openapi + +import ( + "bytes" + "context" + "fmt" + "io" + "net/http" + "net/url" + "sync" + + "github.com/getkin/kin-openapi/openapi3" + "github.com/getkin/kin-openapi/openapi3filter" + "github.com/getkin/kin-openapi/routers" + "github.com/getkin/kin-openapi/routers/gorillamux" + 
"github.com/shellhub-io/shellhub/pkg/envs" + "github.com/sirupsen/logrus" +) + +// OpenAPIValidator validates HTTP responses against OpenAPI specification +type OpenAPIValidator struct { + router routers.Router + failOnMismatch bool + enabledPaths map[string]bool + mu sync.RWMutex + logger *logrus.Entry +} + +// ValidationResult contains the result of response validation +type ValidationResult struct { + Valid bool + Error string + Path string + Method string + StatusCode int +} + +// OpenAPIValidatorConfig holds configuration for the validator +type OpenAPIValidatorConfig struct { + // SchemaPath is the URL to the OpenAPI schema. + SchemaPath *url.URL + // EnabledPaths are the paths that should be validated (nil = all paths) + EnabledPaths []string + // FailOnMismatch determines if validation failures should cause HTTP errors + FailOnMismatch bool + // Logger for validation messages + Logger *logrus.Entry +} + +// NewOpenAPIValidator creates a new OpenAPI response validator +func NewOpenAPIValidator(ctx context.Context, config *OpenAPIValidatorConfig) (*OpenAPIValidator, error) { + if config == nil { + config = &OpenAPIValidatorConfig{} + } + + if config.Logger == nil { + config.Logger = logrus.NewEntry(logrus.StandardLogger()) + } + + validator := &OpenAPIValidator{ + failOnMismatch: config.FailOnMismatch && envs.IsDevelopment(), + enabledPaths: make(map[string]bool), + logger: config.Logger, + } + + if config.SchemaPath == nil { + config.SchemaPath = GetDefaultSchemaPath() + } + + if config.SchemaPath == nil { + return nil, fmt.Errorf("OpenAPI schema path is not defined") + } + + loader := &openapi3.Loader{ + Context: ctx, + IsExternalRefsAllowed: true, + } + + doc, err := loader.LoadFromURI(config.SchemaPath) + if err != nil { + return nil, fmt.Errorf("failed to load OpenAPI schema: %w", err) + } + + if err := doc.Validate(ctx); err != nil { + return nil, fmt.Errorf("invalid OpenAPI schema: %w", err) + } + + router, err := gorillamux.NewRouter(doc) + if err != 
nil { + return nil, fmt.Errorf("failed to create OpenAPI router: %w", err) + } + + validator.router = router + + if len(config.EnabledPaths) > 0 { + for _, path := range config.EnabledPaths { + validator.enabledPaths[path] = true + } + } + + config.Logger.Info("OpenAPI response validator initialized successfully") + + return validator, nil +} + +// ValidateResponse validates an HTTP response against the OpenAPI schema +func (v *OpenAPIValidator) ValidateResponse(r *http.Request, response *http.Response, responseBody []byte) *ValidationResult { + result := &ValidationResult{ + Valid: true, + Path: r.URL.Path, + Method: r.Method, + StatusCode: response.StatusCode, + } + + v.mu.RLock() + defer v.mu.RUnlock() + + if len(v.enabledPaths) > 0 && !v.enabledPaths[r.URL.Path] { + return result + } + + route, pathParams, err := v.router.FindRoute(r) + if err != nil { + v.logger.WithFields(logrus.Fields{ + "path": r.URL.Path, + "method": r.Method, + "error": err.Error(), + }).Debug("Path not found in OpenAPI spec") + + return result + } + + v.logger.WithFields(logrus.Fields{ + "path": r.URL.Path, + "method": r.Method, + }).Debug("Path found in OpenAPI spec, proceeding with validation") + + requestValidationInput := &openapi3filter.RequestValidationInput{ + Request: r, + PathParams: pathParams, + Route: route, + } + + responseValidationInput := &openapi3filter.ResponseValidationInput{ + RequestValidationInput: requestValidationInput, + Status: response.StatusCode, + Header: response.Header, + Body: io.NopCloser(bytes.NewReader(responseBody)), + } + + ctx := context.Background() + + if err := openapi3filter.ValidateResponse(ctx, responseValidationInput); err != nil { + result.Valid = false + result.Error = err.Error() + + v.logger.WithFields(logrus.Fields{ + "path": r.URL.Path, + "method": r.Method, + "status_code": response.StatusCode, + "error": err.Error(), + }).Trace("OpenAPI response validation failed") + } else { + v.logger.WithFields(logrus.Fields{ + "path": r.URL.Path, + 
"method": r.Method, + "status_code": response.StatusCode, + }).Trace("OpenAPI response validation passed") + } + + return result +} + +// EnablePath enables validation for a specific path +func (v *OpenAPIValidator) EnablePath(path string) { + v.mu.Lock() + defer v.mu.Unlock() + v.enabledPaths[path] = true +} + +// DisablePath disables validation for a specific path +func (v *OpenAPIValidator) DisablePath(path string) { + v.mu.Lock() + defer v.mu.Unlock() + delete(v.enabledPaths, path) +} + +// ShouldFailOnMismatch returns whether validation failures should cause HTTP errors +func (v *OpenAPIValidator) ShouldFailOnMismatch() bool { + return v.failOnMismatch +} + +// GetDefaultSchemaPath returns the default path to the OpenAPI schema +func GetDefaultSchemaPath() *url.URL { + // NOTE: This path refers to the generated OpenAPI spec file. + // TODO: Make this configurable via environment variable if needed. + u, err := url.Parse("http://openapi:8080/openapi/openapi.json") + if err != nil { + return nil + } + + return u +} diff --git a/api/pkg/responses/system.go b/api/pkg/responses/system.go new file mode 100644 index 00000000000..e0eedf5b99d --- /dev/null +++ b/api/pkg/responses/system.go @@ -0,0 +1,18 @@ +package responses + +type SystemInfo struct { + Version string `json:"version"` + Endpoints *SystemEndpointsInfo `json:"endpoints"` + Setup bool `json:"setup"` + Authentication *SystemAuthenticationInfo `json:"authentication"` +} + +type SystemAuthenticationInfo struct { + Local bool `json:"local"` + SAML bool `json:"saml"` +} + +type SystemEndpointsInfo struct { + API string `json:"api"` + SSH string `json:"ssh"` +} diff --git a/api/refresh.yml b/api/refresh.yml index 00375ecde35..4169bf2a114 100644 --- a/api/refresh.yml +++ b/api/refresh.yml @@ -5,7 +5,7 @@ included_extensions: - .go build_target_path: "" build_path: /go/src/github.com/shellhub-io/shellhub/api -build_flags: [] +build_flags: ["-gcflags=all=-N -l"] build_delay: 200ns binary_name: api command_flags: 
["server"] diff --git a/api/routes/api-key.go b/api/routes/api-key.go new file mode 100644 index 00000000000..ecc67442372 --- /dev/null +++ b/api/routes/api-key.go @@ -0,0 +1,102 @@ +package routes + +import ( + "net/http" + "strconv" + + "github.com/shellhub-io/shellhub/api/pkg/gateway" + "github.com/shellhub-io/shellhub/pkg/api/requests" +) + +const ( + CreateAPIKeyURL = "/namespaces/api-key" + ListAPIKeysURL = "/namespaces/api-key" + UpdateAPIKeyURL = "/namespaces/api-key/:name" + DeleteAPIKeyURL = "/namespaces/api-key/:name" +) + +func (h *Handler) CreateAPIKey(c gateway.Context) error { + req := new(requests.CreateAPIKey) + + if err := c.Bind(req); err != nil { + return err + } + + if err := c.Validate(req); err != nil { + return err + } + + res, err := h.service.CreateAPIKey(c.Ctx(), req) + if err != nil { + return err + } + + return c.JSON(http.StatusOK, res) +} + +func (h *Handler) ListAPIKeys(c gateway.Context) error { + req := new(requests.ListAPIKey) + + if err := c.Bind(req); err != nil { + return err + } + + req.Paginator.Normalize() + + if req.Sorter.By == "" { + req.Sorter.By = "expires_in" + } + + if req.Sorter.Order == "" { + req.Sorter.Order = "desc" + } + + if err := c.Validate(req); err != nil { + return err + } + + res, count, err := h.service.ListAPIKeys(c.Ctx(), req) + if err != nil { + return err + } + + c.Response().Header().Set("X-Total-Count", strconv.Itoa(count)) + + return c.JSON(http.StatusOK, res) +} + +func (h *Handler) UpdateAPIKey(c gateway.Context) error { + req := new(requests.UpdateAPIKey) + + if err := c.Bind(req); err != nil { + return err + } + + if err := c.Validate(req); err != nil { + return err + } + + if err := h.service.UpdateAPIKey(c.Ctx(), req); err != nil { + return err + } + + return c.NoContent(http.StatusOK) +} + +func (h *Handler) DeleteAPIKey(c gateway.Context) error { + req := new(requests.DeleteAPIKey) + + if err := c.Bind(req); err != nil { + return err + } + + if err := c.Validate(req); err != nil { + return 
err + } + + if err := h.service.DeleteAPIKey(c.Ctx(), req); err != nil { + return err + } + + return c.NoContent(http.StatusOK) +} diff --git a/api/routes/api-key_test.go b/api/routes/api-key_test.go new file mode 100644 index 00000000000..435d718edce --- /dev/null +++ b/api/routes/api-key_test.go @@ -0,0 +1,805 @@ +package routes + +import ( + "encoding/json" + "net/http" + "net/http/httptest" + "net/url" + "strings" + "testing" + "time" + + servicemock "github.com/shellhub-io/shellhub/api/services/mocks" + "github.com/shellhub-io/shellhub/pkg/api/query" + "github.com/shellhub-io/shellhub/pkg/api/requests" + "github.com/shellhub-io/shellhub/pkg/api/responses" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" +) + +func TestCreateAPIKey(t *testing.T) { + type Expected struct { + body *responses.CreateAPIKey + status int + } + + svcMock := new(servicemock.Service) + + cases := []struct { + description string + headers map[string]string + body map[string]interface{} + requiredMocks func() + expected Expected + }{ + { + description: "fails with api key", + headers: map[string]string{ + "Content-Type": "application/json", + "X-API-KEY": "b2f7cc0e-d933-4aad-9ab2-b557f2f2554f", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "owner", + }, + body: map[string]interface{}{ + "name": "dev", + "expires_at": 30, + }, + requiredMocks: func() { + }, + expected: Expected{body: nil, status: http.StatusForbidden}, + }, + { + description: "fails when role is observer", + headers: map[string]string{ + "Content-Type": "application/json", + "X-ID": "000000000000000000000000", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "observer", + }, + body: map[string]interface{}{ + "name": "dev", + "expires_at": 30, + }, + requiredMocks: func() { + }, + expected: Expected{body: nil, status: http.StatusForbidden}, + }, + { + description: "fails when role is operator", + headers: 
map[string]string{ + "Content-Type": "application/json", + "X-ID": "000000000000000000000000", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "operator", + }, + body: map[string]interface{}{ + "name": "dev", + "expires_at": 30, + }, + requiredMocks: func() { + }, + expected: Expected{body: nil, status: http.StatusForbidden}, + }, + { + description: "fails when name is invalid due to length < 3", + headers: map[string]string{ + "Content-Type": "application/json", + "X-ID": "000000000000000000000000", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "owner", + }, + body: map[string]interface{}{ + "name": "de", + "expires_at": 30, + }, + requiredMocks: func() { + }, + expected: Expected{body: nil, status: http.StatusBadRequest}, + }, + { + description: "fails when name is invalid due to length > 20", + headers: map[string]string{ + "Content-Type": "application/json", + "X-ID": "000000000000000000000000", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "owner", + }, + body: map[string]interface{}{ + "name": "devdevdevdevdevdevdev", + "expires_at": 30, + }, + requiredMocks: func() { + }, + expected: Expected{body: nil, status: http.StatusBadRequest}, + }, + { + description: "fails when name is invalid due to whitespaces", + headers: map[string]string{ + "Content-Type": "application/json", + "X-ID": "000000000000000000000000", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "owner", + }, + body: map[string]interface{}{ + "name": "dev dev", + "expires_at": 30, + }, + requiredMocks: func() { + }, + expected: Expected{body: nil, status: http.StatusBadRequest}, + }, + { + description: "fails when expires_at is invalid", + headers: map[string]string{ + "Content-Type": "application/json", + "X-ID": "000000000000000000000000", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "owner", + }, + body: map[string]interface{}{ + "name": "dev", + "expires_at": 0, + }, + 
requiredMocks: func() { + }, + expected: Expected{body: nil, status: http.StatusBadRequest}, + }, + { + description: "fails when key is provided but invalid", + headers: map[string]string{ + "Content-Type": "application/json", + "X-ID": "000000000000000000000000", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "owner", + }, + body: map[string]interface{}{ + "name": "dev", + "expires_at": 0, + "key": "invalid", + }, + requiredMocks: func() { + }, + expected: Expected{body: nil, status: http.StatusBadRequest}, + }, + { + description: "fails when optional role is provided but invalid", + headers: map[string]string{ + "Content-Type": "application/json", + "X-ID": "000000000000000000000000", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "owner", + }, + body: map[string]interface{}{ + "name": "dev", + "expires_at": 0, + "role": "invalid", + }, + requiredMocks: func() { + }, + expected: Expected{body: nil, status: http.StatusBadRequest}, + }, + { + description: "succeeds", + headers: map[string]string{ + "Content-Type": "application/json", + "X-ID": "000000000000000000000000", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "owner", + }, + body: map[string]interface{}{ + "name": "dev", + "expires_at": 30, + }, + requiredMocks: func() { + svcMock.On( + "CreateAPIKey", + mock.Anything, + &requests.CreateAPIKey{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "dev", + Role: "owner", + ExpiresAt: 30, + }). + Return(&responses.CreateAPIKey{}, nil). 
+ Once() + }, + expected: Expected{ + body: &responses.CreateAPIKey{}, + status: http.StatusOK, + }, + }, + { + description: "succeeds with optional body", + headers: map[string]string{ + "Content-Type": "application/json", + "X-ID": "000000000000000000000000", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "owner", + }, + body: map[string]interface{}{ + "name": "dev", + "expires_at": 30, + "key": "3d7a3ea1-d1eb-4ffc-8c14-f7bfd1b7c550", + "role": "administrator", + }, + requiredMocks: func() { + svcMock.On( + "CreateAPIKey", + mock.Anything, + &requests.CreateAPIKey{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "dev", + Role: "owner", + ExpiresAt: 30, + Key: "3d7a3ea1-d1eb-4ffc-8c14-f7bfd1b7c550", + OptRole: "administrator", + }). + Return(&responses.CreateAPIKey{}, nil). + Once() + }, + expected: Expected{ + body: &responses.CreateAPIKey{}, + status: http.StatusOK, + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + tc.requiredMocks() + + data, err := json.Marshal(tc.body) + require.NoError(t, err) + + req := httptest.NewRequest(http.MethodPost, "/api/namespaces/api-key", strings.NewReader(string(data))) + for k, v := range tc.headers { + req.Header.Set(k, v) + } + + rec := httptest.NewRecorder() + e := NewRouter(svcMock) + e.ServeHTTP(rec, req) + + require.Equal(t, tc.expected.status, rec.Result().StatusCode) + if tc.expected.body != nil { + responseBody := new(responses.CreateAPIKey) + require.NoError(t, json.NewDecoder(rec.Body).Decode(&responseBody)) + require.Equal(t, tc.expected.body, responseBody) + } + }) + } +} + +func TestListAPIKey(t *testing.T) { + type Expected struct { + body []models.APIKey + status int + } + + svcMock := new(servicemock.Service) + + cases := []struct { + description string + headers map[string]string + query func() string + requiredMocks func() + expected Expected + }{ + { + description: "success", + headers: 
map[string]string{ + "Content-Type": "application/json", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "owner", + }, + query: func() string { + url := &url.Values{} + url.Add("page", "1") + url.Add("per_page", "10") + url.Add("sort_by", "created_at") + url.Add("order_by", "asc") + + return url.Encode() + }, + requiredMocks: func() { + svcMock.On( + "ListAPIKeys", + mock.Anything, + &requests.ListAPIKey{ + TenantID: "00000000-0000-4000-0000-000000000000", + Paginator: query.Paginator{Page: 1, PerPage: 10}, + Sorter: query.Sorter{By: "created_at", Order: "asc"}, + }). + Return( + []models.APIKey{ + { + ID: "f23a2e56cd3fcfba002c72675c870e1e7813292adc40bbf14cea479a2e07976a", + Name: "dev", + CreatedBy: "507f1f77bcf86cd799439011", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "admin", + CreatedAt: time.Date(2023, 0o1, 0o1, 12, 0o0, 0o0, 0o0, time.UTC), + UpdatedAt: time.Date(2023, 0o1, 0o1, 12, 0o0, 0o0, 0o0, time.UTC), + ExpiresIn: 0, + }, + }, + 0, + nil, + ). 
+ Once() + }, + expected: Expected{ + body: []models.APIKey{ + { + Name: "dev", + CreatedBy: "507f1f77bcf86cd799439011", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "admin", + CreatedAt: time.Date(2023, 0o1, 0o1, 12, 0o0, 0o0, 0o0, time.UTC), + UpdatedAt: time.Date(2023, 0o1, 0o1, 12, 0o0, 0o0, 0o0, time.UTC), + ExpiresIn: 0, + }, + }, + status: http.StatusOK, + }, + }, + { + description: "success when page and per_page are invalid", + headers: map[string]string{ + "Content-Type": "application/json", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "owner", + }, + query: func() string { + url := &url.Values{} + url.Add("page", "-1") + url.Add("per_page", "1000") + url.Add("sort_by", "created_at") + url.Add("order_by", "asc") + + return url.Encode() + }, + requiredMocks: func() { + svcMock.On( + "ListAPIKeys", + mock.Anything, + &requests.ListAPIKey{ + TenantID: "00000000-0000-4000-0000-000000000000", + Paginator: query.Paginator{Page: 1, PerPage: 100}, + Sorter: query.Sorter{By: "created_at", Order: "asc"}, + }). + Return( + []models.APIKey{ + { + ID: "f23a2e56cd3fcfba002c72675c870e1e7813292adc40bbf14cea479a2e07976a", + Name: "dev", + CreatedBy: "507f1f77bcf86cd799439011", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "admin", + CreatedAt: time.Date(2023, 0o1, 0o1, 12, 0o0, 0o0, 0o0, time.UTC), + UpdatedAt: time.Date(2023, 0o1, 0o1, 12, 0o0, 0o0, 0o0, time.UTC), + ExpiresIn: 0, + }, + }, + 0, + nil, + ). 
+ Once() + }, + expected: Expected{ + body: []models.APIKey{ + { + Name: "dev", + CreatedBy: "507f1f77bcf86cd799439011", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "admin", + CreatedAt: time.Date(2023, 0o1, 0o1, 12, 0o0, 0o0, 0o0, time.UTC), + UpdatedAt: time.Date(2023, 0o1, 0o1, 12, 0o0, 0o0, 0o0, time.UTC), + ExpiresIn: 0, + }, + }, + status: http.StatusOK, + }, + }, + { + description: "success when order_by is an empty string", + headers: map[string]string{ + "Content-Type": "application/json", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "owner", + }, + query: func() string { + url := &url.Values{} + url.Add("page", "1") + url.Add("per_page", "10") + url.Add("sort_by", "created_at") + + return url.Encode() + }, + requiredMocks: func() { + svcMock.On( + "ListAPIKeys", + mock.Anything, + &requests.ListAPIKey{ + TenantID: "00000000-0000-4000-0000-000000000000", + Paginator: query.Paginator{Page: 1, PerPage: 10}, + Sorter: query.Sorter{By: "created_at", Order: "desc"}, + }). + Return( + []models.APIKey{ + { + Name: "dev", + CreatedBy: "507f1f77bcf86cd799439011", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "admin", + CreatedAt: time.Date(2023, 0o1, 0o1, 12, 0o0, 0o0, 0o0, time.UTC), + UpdatedAt: time.Date(2023, 0o1, 0o1, 12, 0o0, 0o0, 0o0, time.UTC), + ExpiresIn: 0, + }, + }, + 0, + nil, + ). 
+ Once() + }, + expected: Expected{ + body: []models.APIKey{ + { + Name: "dev", + CreatedBy: "507f1f77bcf86cd799439011", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "admin", + CreatedAt: time.Date(2023, 0o1, 0o1, 12, 0o0, 0o0, 0o0, time.UTC), + UpdatedAt: time.Date(2023, 0o1, 0o1, 12, 0o0, 0o0, 0o0, time.UTC), + ExpiresIn: 0, + }, + }, + status: http.StatusOK, + }, + }, + { + description: "success when sort_by is an empty string", + headers: map[string]string{ + "Content-Type": "application/json", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "owner", + }, + query: func() string { + url := &url.Values{} + url.Add("page", "1") + url.Add("per_page", "10") + url.Add("order_by", "asc") + + return url.Encode() + }, + requiredMocks: func() { + svcMock.On( + "ListAPIKeys", + mock.Anything, + &requests.ListAPIKey{ + TenantID: "00000000-0000-4000-0000-000000000000", + Paginator: query.Paginator{Page: 1, PerPage: 10}, + Sorter: query.Sorter{By: "expires_in", Order: "asc"}, + }). + Return( + []models.APIKey{ + { + ID: "f23a2e56cd3fcfba002c72675c870e1e7813292adc40bbf14cea479a2e07976a", + Name: "dev", + CreatedBy: "507f1f77bcf86cd799439011", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "admin", + CreatedAt: time.Date(2023, 0o1, 0o1, 12, 0o0, 0o0, 0o0, time.UTC), + UpdatedAt: time.Date(2023, 0o1, 0o1, 12, 0o0, 0o0, 0o0, time.UTC), + ExpiresIn: 0, + }, + }, + 0, + nil, + ). 
+ Once() + }, + expected: Expected{ + body: []models.APIKey{ + { + Name: "dev", + CreatedBy: "507f1f77bcf86cd799439011", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "admin", + CreatedAt: time.Date(2023, 0o1, 0o1, 12, 0o0, 0o0, 0o0, time.UTC), + UpdatedAt: time.Date(2023, 0o1, 0o1, 12, 0o0, 0o0, 0o0, time.UTC), + ExpiresIn: 0, + }, + }, + status: http.StatusOK, + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + tc.requiredMocks() + + req := httptest.NewRequest(http.MethodGet, "/api/namespaces/api-key?"+tc.query(), nil) + for k, v := range tc.headers { + req.Header.Set(k, v) + } + + rec := httptest.NewRecorder() + e := NewRouter(svcMock) + e.ServeHTTP(rec, req) + + require.Equal(t, tc.expected.status, rec.Result().StatusCode) + if tc.expected.body != nil { + responseBody := make([]models.APIKey, 0) + require.NoError(t, json.NewDecoder(rec.Body).Decode(&responseBody)) + require.Equal(t, tc.expected.body, responseBody) + } + }) + } +} + +func TestUpdateAPIKey(t *testing.T) { + type Expected struct { + status int + } + + svcMock := new(servicemock.Service) + + cases := []struct { + description string + name string + headers map[string]string + body map[string]string + requiredMocks func() + expected Expected + }{ + { + description: "fails with api key", + name: "dev", + headers: map[string]string{ + "Content-Type": "application/json", + "X-API-KEY": "b2f7cc0e-d933-4aad-9ab2-b557f2f2554f", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "owner", + }, + body: map[string]string{ + "name": "prod", + }, + requiredMocks: func() { + }, + expected: Expected{status: http.StatusForbidden}, + }, + { + description: "fails when role is observer", + name: "dev", + headers: map[string]string{ + "Content-Type": "application/json", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "observer", + }, + body: map[string]string{ + "name": "prod", + }, + requiredMocks: func() { + }, + expected: 
Expected{status: http.StatusForbidden}, + }, + { + description: "fails when role is operator", + name: "dev", + headers: map[string]string{ + "Content-Type": "application/json", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "operator", + }, + body: map[string]string{ + "name": "prod", + }, + requiredMocks: func() { + }, + expected: Expected{status: http.StatusForbidden}, + }, + { + description: "fails when name is invalid due to length < 3", + name: "dev", + headers: map[string]string{ + "Content-Type": "application/json", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "owner", + }, + body: map[string]string{ + "name": "pr", + }, + requiredMocks: func() { + }, + expected: Expected{status: http.StatusBadRequest}, + }, + { + description: "fails when name is invalid due to length > 20", + name: "dev", + headers: map[string]string{ + "Content-Type": "application/json", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "owner", + }, + body: map[string]string{ + "name": "prodprodprodprodprodprod", + }, + requiredMocks: func() { + }, + expected: Expected{status: http.StatusBadRequest}, + }, + { + description: "fails when name is invalid due to whitespaces", + name: "dev", + headers: map[string]string{ + "Content-Type": "application/json", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "owner", + }, + body: map[string]string{ + "name": "prod prod", + }, + requiredMocks: func() { + }, + expected: Expected{status: http.StatusBadRequest}, + }, + { + description: "fails when role is invalid", + name: "dev", + headers: map[string]string{ + "Content-Type": "application/json", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "owner", + }, + body: map[string]string{ + "role": "invalid", + }, + requiredMocks: func() { + }, + expected: Expected{status: http.StatusBadRequest}, + }, + { + description: "succeeds", + name: "dev", + headers: map[string]string{ + "Content-Type": 
"application/json", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "owner", + }, + body: map[string]string{ + "name": "prod", + "role": "administrator", + }, + requiredMocks: func() { + svcMock.On( + "UpdateAPIKey", + mock.Anything, + &requests.UpdateAPIKey{ + TenantID: "00000000-0000-4000-0000-000000000000", + CurrentName: "dev", + Name: "prod", + Role: "administrator", + }). + Return(nil). + Once() + }, + expected: Expected{status: http.StatusOK}, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + tc.requiredMocks() + + data, err := json.Marshal(tc.body) + require.NoError(t, err) + + req := httptest.NewRequest(http.MethodPatch, "/api/namespaces/api-key/"+tc.name, strings.NewReader(string(data))) + for k, v := range tc.headers { + req.Header.Set(k, v) + } + + rec := httptest.NewRecorder() + e := NewRouter(svcMock) + e.ServeHTTP(rec, req) + + require.Equal(t, tc.expected.status, rec.Result().StatusCode) + }) + } +} + +func TestDeleteAPIKey(t *testing.T) { + type Expected struct { + status int + } + + svcMock := new(servicemock.Service) + + cases := []struct { + description string + name string + headers map[string]string + requiredMocks func() + expected Expected + }{ + { + description: "fails with api key", + name: "dev", + headers: map[string]string{ + "Content-Type": "application/json", + "X-API-KEY": "b2f7cc0e-d933-4aad-9ab2-b557f2f2554f", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "owner", + }, + requiredMocks: func() { + }, + expected: Expected{status: http.StatusForbidden}, + }, + { + description: "fails when role is observer", + name: "dev", + headers: map[string]string{ + "Content-Type": "application/json", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "observer", + }, + requiredMocks: func() { + }, + expected: Expected{status: http.StatusForbidden}, + }, + { + description: "fails when role is operator", + name: "dev", + headers: map[string]string{ + 
"Content-Type": "application/json", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "operator", + }, + requiredMocks: func() { + }, + expected: Expected{status: http.StatusForbidden}, + }, + { + description: "succeeds", + name: "dev", + headers: map[string]string{ + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "owner", + }, + requiredMocks: func() { + svcMock.On( + "DeleteAPIKey", + mock.Anything, + &requests.DeleteAPIKey{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "dev", + }). + Return(nil). + Once() + }, + expected: Expected{status: http.StatusOK}, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + tc.requiredMocks() + + req := httptest.NewRequest(http.MethodDelete, "/api/namespaces/api-key/"+tc.name, nil) + for k, v := range tc.headers { + req.Header.Set(k, v) + } + + rec := httptest.NewRecorder() + e := NewRouter(svcMock) + e.ServeHTTP(rec, req) + + require.Equal(t, tc.expected.status, rec.Result().StatusCode) + }) + } +} diff --git a/api/routes/auth.go b/api/routes/auth.go index 9d405997ef7..83e57140b3b 100644 --- a/api/routes/auth.go +++ b/api/routes/auth.go @@ -4,140 +4,97 @@ import ( "errors" "net/http" "strconv" + "strings" - jwt "github.com/golang-jwt/jwt" - "github.com/labstack/echo/v4" - "github.com/labstack/echo/v4/middleware" - "github.com/mitchellh/mapstructure" "github.com/shellhub-io/shellhub/api/pkg/gateway" errs "github.com/shellhub-io/shellhub/api/routes/errors" svc "github.com/shellhub-io/shellhub/api/services" - client "github.com/shellhub-io/shellhub/pkg/api/internalclient" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" + "github.com/shellhub-io/shellhub/pkg/api/jwttoken" "github.com/shellhub-io/shellhub/pkg/api/requests" - "github.com/shellhub-io/shellhub/pkg/models" ) const ( - AuthRequestURL = "/auth" - AuthDeviceURL = "/devices/auth" - AuthDeviceURLV2 = "/auth/device" - AuthUserURL = "/login" - AuthUserURLV2 = "/auth/user" - + 
AuthRequestURL = "/auth" + AuthDeviceURL = "/devices/auth" + AuthDeviceURLV2 = "/auth/device" + AuthLocalUserURL = "/login" + AuthLocalUserURLV2 = "/auth/user" AuthUserTokenInternalURL = "/auth/token/:id" //nolint:gosec AuthUserTokenPublicURL = "/auth/token/:tenant" //nolint:gosec - - AuthPublicKeyURL = "/auth/ssh" - AuthMFAURL = "/auth/mfa" -) - -const ( - // AuthRequestUserToken is the type of the token used to authenticate a user. - AuthRequestUserToken = "user" - // AuthRequestDeviceToken is the type of the token used to authenticate a device. - AuthRequestDeviceToken = "device" + AuthPublicKeyURL = "/auth/ssh" + AuthMFAURL = "/auth/mfa" ) -// AuthRequest checks the user and device authentication token. +// AuthRequest is a proxy-level authentication middleware. It decodes a specified +// authentication hash (e.g. JWT tokens and API keys), sets the credentials in +// headers, and redirects to the original endpoint. +// +// The following sequential diagram represents the authentication pipeline: // -// This route is a special route and it is called every time a user tries to access a route which requires -// authentication. It gets the JWT token sent, unwraps it and sets the information, like tenant, user, etc., as headers -// of the response to be got in the subsequent through the [gateway.Context]. +// +------+ +----------------+ +----------+ +// | User | | /internal/auth | | /api/... | +// +------+ +----------------+ +----------+ +// | | | +// | Send Request | | +// |------------->| | +// | | Extract and decode hash | +// | | Set auth headers | +// | |------------------------>| +// | | | Execute the target endpoint +// | | +// | Send response back to the user | +// |<---------------------------------------| +// +// If the authentication fails for any reason, it must return the failed status +// without redirecting the request. A token can be use to authenticate either a +// device or a user. 
func (h *Handler) AuthRequest(c gateway.Context) error { - token, ok := c.Get(middleware.DefaultJWTConfig.ContextKey).(*jwt.Token) - if !ok { - return svc.ErrTypeAssertion - } - - rawClaims, ok := token.Claims.(*jwt.MapClaims) - if !ok { - return svc.ErrTypeAssertion - } - - // setHeader sets a reader to the HTTP response to be read in the subsequent request. - setHeader := func(response gateway.Context, key string, value string) { - response.Response().Header().Set(key, value) - } - - // decodeMap parses the JWT claims into a struct. - decodeMap := func(input *jwt.MapClaims, output any) error { - config := &mapstructure.DecoderConfig{ - TagName: "json", - Metadata: nil, - Result: output, - } - - decoder, err := mapstructure.NewDecoder(config) + if key := c.Request().Header.Get("X-API-Key"); key != "" { + apiKey, err := h.service.AuthAPIKey(c.Ctx(), key) if err != nil { return err } - return decoder.Decode(input) - } - - switch claims := (*rawClaims)["claims"]; claims { - case AuthRequestUserToken: - // A [AuthRequestUserToken] is a token used to authenticate a user. - // This kind of token can have its "namespace" as a empty value, indicating that is a "user" token. Its a kind - // of sub-token, what allows the logged user to change its information, but does not allow to change the any - // other namespace information. - - var claims models.UserAuthClaims - if err := decodeMap(rawClaims, &claims); err != nil { - return err - } - - args := c.QueryParam("args") - if args != "skip" && claims.Tenant != "" { - // This forces any no cached token to be invalid, even if it not not expired. 
- if ok, err := h.service.AuthIsCacheToken(c.Ctx(), claims.Tenant, claims.ID); err != nil || !ok { - return svc.NewErrAuthUnathorized(err) - } - } + c.Response().Header().Set("X-Tenant-ID", apiKey.TenantID) + c.Response().Header().Set("X-Role", apiKey.Role.String()) + c.Response().Header().Set("X-API-KEY", key) - MFA, err := h.service.AuthMFA(c.Ctx(), claims.ID) - if err != nil { - return err - } + return c.NoContent(http.StatusOK) + } - if MFA { - if claims.MFA.Enable != MFA { - return svc.NewErrAuthUnathorized(errors.New("this token doesn't match the user MFA status")) + bearerToken := c.Request().Header.Get("Authorization") + claims, err := jwttoken.ClaimsFromBearerToken(h.service.PublicKey(), bearerToken) + if err != nil { + return c.NoContent(http.StatusUnauthorized) + } + + switch claims := claims.(type) { + case *authorizer.DeviceClaims: + c.Response().Header().Set("X-Device-UID", claims.UID) + c.Response().Header().Set("X-Tenant-ID", claims.TenantID) + case *authorizer.UserClaims: + // As the role is a dynamic attribute, and a JWT token must be stateless, we need to retrieve the role + // every time this middleware is invoked (generally from the cache). + if claims.TenantID != "" { + role, err := h.service.GetUserRole(c.Ctx(), claims.TenantID, claims.ID) + if err != nil { + return err } - // NOTICE: when [args] is "skip", it means that route may be accessed by a unvalidated token, even when MFA - // on user is enable. It is used by the route that validate the OTP from the user's OTP APP, avoiding extra - // logic in a middleware apart. When that is true, only the user's ID and username are send to the next - // route; other values are set for its default value. 
- if args != "skip" && !claims.MFA.Validate { - return svc.NewErrAuthUnathorized(errors.New("this token isn't validated")) - } + claims.Role = authorizer.RoleFromString(role) } - // Extract datas of user from JWT - c.Response().Header().Set("X-Tenant-ID", claims.Tenant) - c.Response().Header().Set("X-Username", claims.Username) c.Response().Header().Set("X-ID", claims.ID) - c.Response().Header().Set("X-Role", claims.Role) - c.Response().Header().Set("X-MFA", strconv.FormatBool(claims.MFA.Enable)) - c.Response().Header().Set("X-Validate-MFA", strconv.FormatBool(claims.MFA.Validate)) - - return c.NoContent(http.StatusOK) - case AuthRequestDeviceToken: - var claims models.DeviceAuthClaims - - if err := decodeMap(rawClaims, &claims); err != nil { - return err - } - - // Extract device UID from JWT and set it into the header. - setHeader(c, client.DeviceUIDHeader, claims.UID) - - return c.NoContent(http.StatusOK) + c.Response().Header().Set("X-Username", claims.Username) + c.Response().Header().Set("X-Tenant-ID", claims.TenantID) + c.Response().Header().Set("X-Role", claims.Role.String()) + c.Response().Header().Set("X-Admin", strconv.FormatBool(claims.Admin)) default: - - return svc.NewErrAuthUnathorized(nil) + return c.NoContent(http.StatusUnauthorized) } + + return c.NoContent(http.StatusOK) } func (h *Handler) AuthDevice(c gateway.Context) error { @@ -146,17 +103,19 @@ func (h *Handler) AuthDevice(c gateway.Context) error { return err } - if err := c.Validate(&req); err != nil { - return err + // NOTE: The previous version of the Agent in Connector mode could send the container's name without converting + // the dot character to an underscore, which is not supported in ShellHub device naming. To prevent validation + // errors with this old version, we are implementing a server-side change to handle this conversion. + // TODO: This modification could be in the service layer. 
+ if strings.Contains(req.Hostname, ".") { + req.Hostname = strings.ReplaceAll(req.Hostname, ".", "_") } - ip := c.Request().Header.Get("X-Real-IP") - res, err := h.service.AuthDevice(c.Ctx(), req, ip) - if err != nil { + if err := c.Validate(&req); err != nil { return err } - err = h.service.SetDevicePosition(c.Ctx(), models.UID(res.UID), ip) + res, err := h.service.AuthDevice(c.Ctx(), req) if err != nil { return err } @@ -164,79 +123,53 @@ func (h *Handler) AuthDevice(c gateway.Context) error { return c.JSON(http.StatusOK, res) } -func (h *Handler) AuthUser(c gateway.Context) error { - var req requests.UserAuth +func (h *Handler) AuthLocalUser(c gateway.Context) error { + req := new(requests.AuthLocalUser) - if err := c.Bind(&req); err != nil { + if err := c.Bind(req); err != nil { return err } - if err := c.Validate(&req); err != nil { + if err := c.Validate(req); err != nil { return err } - res, err := h.service.AuthUser(c.Ctx(), &models.UserAuthRequest{ - Identifier: models.UserAuthIdentifier(req.Username), - Password: req.Password, - }) - if err != nil { - if errors.Is(err, svc.ErrUserNotFound) { - return errs.NewErrUnauthorized(err) - } + res, lockout, mfaToken, err := h.service.AuthLocalUser(c.Ctx(), req, c.RealIP()) + c.Response().Header().Set("X-Account-Lockout", strconv.FormatInt(lockout, 10)) + c.Response().Header().Set("X-MFA-Token", mfaToken) - return err + if lockout > 0 { + return c.NoContent(http.StatusTooManyRequests) } - return c.JSON(http.StatusOK, res) -} - -func (h *Handler) AuthUserInfo(c gateway.Context) error { - username := c.Request().Header.Get("X-Username") - tenant := c.Request().Header.Get("X-Tenant-ID") - token := c.Request().Header.Get(echo.HeaderAuthorization) - - res, err := h.service.AuthUserInfo(c.Ctx(), username, tenant, token) - if err != nil { - return err + if mfaToken != "" { + return c.NoContent(http.StatusUnauthorized) } - return c.JSON(http.StatusOK, res) -} - -func (h *Handler) AuthGetToken(c gateway.Context) error { - 
var req requests.AuthTokenGet - - if err := c.Bind(&req); err != nil { - return err - } - - if err := c.Validate(&req); err != nil { - return err - } - - res, err := h.service.AuthGetToken(c.Ctx(), req.ID, req.MFA) if err != nil { - return err + switch { + case errors.Is(err, svc.ErrUserNotFound): + return errs.NewErrUnauthorized(err) + default: + return err + } } return c.JSON(http.StatusOK, res) } -func (h *Handler) AuthSwapToken(c gateway.Context) error { - var req requests.AuthTokenSwap - if err := c.Bind(&req); err != nil { +func (h *Handler) CreateUserToken(c gateway.Context) error { + req := new(requests.CreateUserToken) + + if err := c.Bind(req); err != nil { return err } - if err := c.Validate(&req); err != nil { + if err := c.Validate(req); err != nil { return err } - var id string - if v := c.ID(); v != nil { - id = v.ID - } - res, err := h.service.AuthSwapToken(c.Ctx(), id, req.Tenant) + res, err := h.service.CreateUserToken(c.Ctx(), req) if err != nil { return err } @@ -261,19 +194,3 @@ func (h *Handler) AuthPublicKey(c gateway.Context) error { return c.JSON(http.StatusOK, res) } - -func AuthMiddleware(next echo.HandlerFunc) echo.HandlerFunc { - return func(c echo.Context) error { - ctx, ok := c.Get("ctx").(*gateway.Context) - if !ok { - return svc.ErrTypeAssertion - } - jwt := middleware.JWTWithConfig(middleware.JWTConfig{ //nolint:staticcheck - Claims: &jwt.MapClaims{}, - SigningKey: ctx.Service().(svc.Service).PublicKey(), - SigningMethod: "RS256", - }) - - return jwt(next)(c) - } -} diff --git a/api/routes/auth_test.go b/api/routes/auth_test.go index 6ea2b0728cb..126485852ad 100644 --- a/api/routes/auth_test.go +++ b/api/routes/auth_test.go @@ -4,106 +4,80 @@ import ( "crypto/rand" "crypto/rsa" "encoding/json" - "fmt" "io" "net/http" "net/http/httptest" "strings" "testing" - "time" - "github.com/golang-jwt/jwt/v4" - "github.com/shellhub-io/shellhub/api/pkg/gateway" - "github.com/shellhub-io/shellhub/api/pkg/guard" svc 
"github.com/shellhub-io/shellhub/api/services" "github.com/shellhub-io/shellhub/api/services/mocks" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" + "github.com/shellhub-io/shellhub/pkg/api/jwttoken" "github.com/shellhub-io/shellhub/pkg/api/requests" - "github.com/shellhub-io/shellhub/pkg/clock" "github.com/shellhub-io/shellhub/pkg/models" "github.com/stretchr/testify/assert" gomock "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" ) -func TestAuthGetToken(t *testing.T) { +func TestAuthDevice(t *testing.T) { mock := new(mocks.Service) type Expected struct { - expectedSession *models.UserAuthResponse - expectedStatus int + expectedResponse *models.DeviceAuthResponse + expectedStatus int } cases := []struct { title string - id requests.AuthTokenGet + requestBody *requests.DeviceAuth requiredMocks func() expected Expected }{ { - title: "fails when validate fails", - id: requests.AuthTokenGet{UserParam: requests.UserParam{ID: ""}}, - requiredMocks: func() {}, + title: "success when device has a preferred hostname and it is uppercase", + requestBody: &requests.DeviceAuth{ + Info: &requests.DeviceInfo{ + ID: "device_id", + PrettyName: "Device Name", + Version: "1.0", + Arch: "amd64", + Platform: "Linux", + }, + Hostname: "TEST", + PublicKey: "your_public_key", + TenantID: "your_tenant_id", + }, + requiredMocks: func() { + mock.On("AuthDevice", gomock.Anything, gomock.AnythingOfType("requests.DeviceAuth")).Return(&models.DeviceAuthResponse{}, nil).Once() + }, expected: Expected{ - expectedSession: nil, - expectedStatus: http.StatusBadRequest, + expectedResponse: &models.DeviceAuthResponse{}, + expectedStatus: http.StatusOK, }, }, { - title: "success when trying to get a token", - id: requests.AuthTokenGet{UserParam: requests.UserParam{ID: "id"}}, + title: "success when device has a preferred hostname and it is lowercase", + requestBody: &requests.DeviceAuth{ + Info: &requests.DeviceInfo{ + ID: "device_id", + PrettyName: "Device Name", + 
Version: "1.0", + Arch: "amd64", + Platform: "Linux", + }, + Hostname: "test", + PublicKey: "your_public_key", + TenantID: "your_tenant_id", + }, requiredMocks: func() { - mock.On("AuthGetToken", gomock.Anything, "id", false).Return(&models.UserAuthResponse{}, nil).Once() + mock.On("AuthDevice", gomock.Anything, gomock.AnythingOfType("requests.DeviceAuth")).Return(&models.DeviceAuthResponse{}, nil).Once() }, expected: Expected{ - expectedSession: &models.UserAuthResponse{}, - expectedStatus: http.StatusOK, + expectedResponse: &models.DeviceAuthResponse{}, + expectedStatus: http.StatusOK, }, }, - } - - for _, tc := range cases { - t.Run(tc.title, func(t *testing.T) { - tc.requiredMocks() - - jsonData, err := json.Marshal(tc.id) - if err != nil { - assert.NoError(t, err) - } - - req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("/internal/auth/token/%s", jsonData), strings.NewReader(string(jsonData))) - req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) - req.Header.Set("X-ID", string(jsonData)) - rec := httptest.NewRecorder() - - e := NewRouter(mock) - e.ServeHTTP(rec, req) - - assert.Equal(t, tc.expected.expectedStatus, rec.Result().StatusCode) - - var session *models.UserAuthResponse - if err := json.NewDecoder(rec.Result().Body).Decode(&session); err != nil { - assert.ErrorIs(t, io.EOF, err) - } - - assert.Equal(t, tc.expected.expectedSession, session) - - mock.AssertExpectations(t) - }) - } -} - -func TestAuthDevice(t *testing.T) { - mock := new(mocks.Service) - - type Expected struct { - expectedResponse *models.DeviceAuthResponse - expectedStatus int - } - cases := []struct { - title string - requestBody *requests.DeviceAuth - requiredMocks func() - expected Expected - }{ { title: "success when try auth a device", requestBody: &requests.DeviceAuth{ @@ -121,7 +95,7 @@ func TestAuthDevice(t *testing.T) { TenantID: "your_tenant_id", }, requiredMocks: func() { - mock.On("AuthDevice", gomock.Anything, 
gomock.AnythingOfType("requests.DeviceAuth"), "").Return(&models.DeviceAuthResponse{}, nil).Once() + mock.On("AuthDevice", gomock.Anything, gomock.AnythingOfType("requests.DeviceAuth")).Return(&models.DeviceAuthResponse{}, nil).Once() mock.On("SetDevicePosition", gomock.Anything, models.UID(""), "").Return(nil).Once() }, expected: Expected{ @@ -169,243 +143,322 @@ func TestAuthDevice(t *testing.T) { } } -func TestAuthUser(t *testing.T) { +func TestAuthLocalUser(t *testing.T) { mock := new(mocks.Service) type Expected struct { - expectedResponse *models.UserAuthResponse - expectedStatus int + body *models.UserAuthResponse + headers map[string]string + status int } cases := []struct { - title string - requestBody *models.UserAuthRequest - requiredMocks func() - expected Expected + description string + req *requests.AuthLocalUser + mocks func() + expected Expected }{ { - title: "success when try to auth a user", - requestBody: &models.UserAuthRequest{ - Identifier: "testuser", - Password: "testpassword", - }, - requiredMocks: func() { - req := &models.UserAuthRequest{ - Identifier: "testuser", - Password: "testpassword", - } - - mock.On("AuthUser", gomock.Anything, req).Return(&models.UserAuthResponse{}, nil).Once() + description: "fails when the identifier is empty", + req: &requests.AuthLocalUser{ + Identifier: "", + Password: "secret", }, + mocks: func() {}, expected: Expected{ - expectedResponse: &models.UserAuthResponse{}, - expectedStatus: http.StatusOK, + body: nil, + headers: map[string]string{}, + status: http.StatusBadRequest, }, }, { - title: "fails when try to validate a username", - requestBody: &models.UserAuthRequest{ - Identifier: "", - Password: "testpassword", + description: "fails when the password is empty", + req: &requests.AuthLocalUser{ + Identifier: "john_doe", + Password: "", }, - requiredMocks: func() {}, + mocks: func() {}, expected: Expected{ - expectedResponse: nil, - expectedStatus: http.StatusBadRequest, + body: nil, + headers: 
map[string]string{}, + status: http.StatusBadRequest, }, }, { - title: "fails when try to validate a password", - requestBody: &models.UserAuthRequest{ - Identifier: "username", - Password: "", + description: "fails when the user is not found", + req: &requests.AuthLocalUser{ + Identifier: "john_doe", + Password: "wrong_password", + }, + mocks: func() { + mock. + On("AuthLocalUser", gomock.Anything, &requests.AuthLocalUser{ + Identifier: "john_doe", + Password: "wrong_password", + }, gomock.Anything). + Return(nil, int64(0), "", svc.ErrUserNotFound). + Once() }, - requiredMocks: func() {}, expected: Expected{ - expectedResponse: nil, - expectedStatus: http.StatusBadRequest, + body: nil, + headers: map[string]string{}, + status: http.StatusUnauthorized, }, }, { - title: "fail when try to auth a user", - requestBody: &models.UserAuthRequest{ - Identifier: "username", - Password: "password", - }, - requiredMocks: func() { - mock.On("AuthUser", gomock.Anything, gomock.Anything).Return(nil, svc.ErrAuthUnathorized).Once() + description: "fails when the password is wrong", + req: &requests.AuthLocalUser{ + Identifier: "john_doe", + Password: "wrong_password", + }, + mocks: func() { + mock. + On("AuthLocalUser", gomock.Anything, &requests.AuthLocalUser{ + Identifier: "john_doe", + Password: "wrong_password", + }, gomock.Anything). + Return(nil, int64(0), "", svc.ErrAuthUnathorized). 
+ Once() }, expected: Expected{ - expectedResponse: nil, - expectedStatus: http.StatusUnauthorized, + body: nil, + headers: map[string]string{ + "X-Account-Lockout": "0", + "X-MFA-Token": "", + }, + status: http.StatusUnauthorized, }, }, - } - - for _, tc := range cases { - t.Run(tc.title, func(t *testing.T) { - tc.requiredMocks() - - jsonData, err := json.Marshal(tc.requestBody) - if err != nil { - assert.NoError(t, err) - } - - req := httptest.NewRequest(http.MethodPost, "/api/auth/user", strings.NewReader(string(jsonData))) - req.Header.Set("Content-Type", "application/json") - rec := httptest.NewRecorder() - - e := NewRouter(mock) - e.ServeHTTP(rec, req) - - assert.Equal(t, tc.expected.expectedStatus, rec.Result().StatusCode) - - if tc.expected.expectedResponse != nil { - var response models.UserAuthResponse - if err := json.NewDecoder(rec.Result().Body).Decode(&response); err != nil { - assert.ErrorIs(t, io.EOF, err) - } - - assert.Equal(t, tc.expected.expectedResponse, &response) - } - }) - } -} - -func TestAuthUserInfo(t *testing.T) { - mock := new(mocks.Service) - - type Expected struct { - expectedResponse *models.UserAuthResponse - expectedStatus int - } - - cases := []struct { - title string - requestHeaders map[string]string - requiredMocks func() - expected Expected - }{ { - title: "success when try to auth a user info", - requestHeaders: map[string]string{ - "X-Username": "user", - "X-Tenant-ID": "tenant", - }, - requiredMocks: func() { - mock.On("AuthUserInfo", gomock.Anything, "user", "tenant", gomock.Anything).Return(&models.UserAuthResponse{}, nil).Once() + description: "fails when reaching the attempt limits", + req: &requests.AuthLocalUser{ + Identifier: "john_doe", + Password: "wrong_password", + }, + mocks: func() { + mock. + On("AuthLocalUser", gomock.Anything, &requests.AuthLocalUser{ + Identifier: "john_doe", + Password: "wrong_password", + }, gomock.Anything). + Return(nil, int64(1711176851), "", svc.ErrAuthUnathorized). 
+ Once() }, expected: Expected{ - expectedResponse: &models.UserAuthResponse{}, - expectedStatus: http.StatusOK, + body: nil, + headers: map[string]string{ + "X-Account-Lockout": "1711176851", + "X-MFA-Token": "", + }, + status: http.StatusTooManyRequests, }, }, { - title: "fails when try to auth a user info", - requestHeaders: map[string]string{ - "X-Username": "user", - "X-Tenant-ID": "tenant", + description: "fails when mfa is enable", + req: &requests.AuthLocalUser{ + Identifier: "john_doe", + Password: "wrong_password", + }, + mocks: func() { + mock. + On("AuthLocalUser", gomock.Anything, &requests.AuthLocalUser{ + Identifier: "john_doe", + Password: "wrong_password", + }, gomock.Anything). + Return(nil, int64(0), "00000000-0000-4000-0000-000000000000", svc.ErrAuthUnathorized). + Once() }, - requiredMocks: func() { - mock.On("AuthUserInfo", gomock.Anything, "user", "tenant", gomock.Anything).Return(nil, svc.ErrAuthUnathorized).Once() + expected: Expected{ + body: nil, + headers: map[string]string{ + "X-Account-Lockout": "0", + "X-MFA-Token": "00000000-0000-4000-0000-000000000000", + }, + status: http.StatusUnauthorized, + }, + }, + { + description: "success when try to auth a user", + req: &requests.AuthLocalUser{ + Identifier: "john_doe", + Password: "secret", + }, + mocks: func() { + mock. + On("AuthLocalUser", gomock.Anything, &requests.AuthLocalUser{ + Identifier: "john_doe", + Password: "secret", + }, gomock.Anything). + Return(&models.UserAuthResponse{ + ID: "65fdd16b5f62f93184ec8a39", + Name: "john doe", + User: "john_doe", + Email: "john.doe@test.com", + Tenant: "00000000-0000-4000-0000-000000000000", + Token: "not-empty", + }, int64(0), "", nil). 
+ Once() }, expected: Expected{ - expectedResponse: nil, - expectedStatus: http.StatusUnauthorized, + body: &models.UserAuthResponse{ + ID: "65fdd16b5f62f93184ec8a39", + Name: "john doe", + User: "john_doe", + Email: "john.doe@test.com", + Tenant: "00000000-0000-4000-0000-000000000000", + Token: "not-empty", + }, + headers: map[string]string{ + "X-Account-Lockout": "0", + "X-MFA-Token": "", + }, + status: http.StatusOK, }, }, } for _, tc := range cases { - t.Run(tc.title, func(t *testing.T) { - tc.requiredMocks() + t.Run(tc.description, func(t *testing.T) { + tc.mocks() - req := httptest.NewRequest(http.MethodGet, "/api/auth/user", nil) - req.Header.Set("Content-Type", "application/json") - - for key, value := range tc.requestHeaders { - req.Header.Set(key, value) + jsonData, err := json.Marshal(tc.req) + if err != nil { + assert.NoError(t, err) } + req := httptest.NewRequest(http.MethodPost, "/api/auth/user", strings.NewReader(string(jsonData))) + req.Header.Set("Content-Type", "application/json") rec := httptest.NewRecorder() e := NewRouter(mock) e.ServeHTTP(rec, req) - assert.Equal(t, tc.expected.expectedStatus, rec.Result().StatusCode) + var body *models.UserAuthResponse - if tc.expected.expectedResponse != nil { - var response models.UserAuthResponse - if err := json.NewDecoder(rec.Result().Body).Decode(&response); err != nil { + if tc.expected.body != nil { + if err := json.NewDecoder(rec.Result().Body).Decode(&body); err != nil { assert.ErrorIs(t, io.EOF, err) } + } - assert.Equal(t, tc.expected.expectedResponse, &response) + assert.Equal(t, tc.expected.body, body) + assert.Equal(t, tc.expected.status, rec.Result().StatusCode) + for k, v := range tc.expected.headers { + assert.Equal(t, v, rec.Result().Header.Get(k)) } }) } } -func TestAuthSwapToken(t *testing.T) { - mock := new(mocks.Service) +func TestCreateUserToken(t *testing.T) { + svcMock := new(mocks.Service) type Expected struct { - expectedResponse *models.UserAuthResponse - expectedStatus int + body 
*models.UserAuthResponse + status int } cases := []struct { - title string - requestBody string - requiredMocks func() - expected Expected + description string + tenantID string + headers map[string]string + mocks func() + expected Expected }{ { - title: "success when try to swap token", - requestBody: "tenant", - requiredMocks: func() { - mock.On("AuthSwapToken", gomock.Anything, "id", "tenant").Return(&models.UserAuthResponse{}, nil).Once() + description: "success without tenant_id", + tenantID: "", + headers: map[string]string{"X-ID": "000000000000000000000000"}, + mocks: func() { + svcMock. + On("CreateUserToken", gomock.Anything, &requests.CreateUserToken{ + UserID: "000000000000000000000000", + TenantID: "", + }). + Return(&models.UserAuthResponse{ + ID: "000000000000000000000000", + Name: "john doe", + User: "john_doe", + Email: "john.doe@test.com", + Tenant: "00000000-0000-4000-0000-000000000000", + Token: "not-empty", + }, nil). + Once() }, expected: Expected{ - expectedResponse: &models.UserAuthResponse{}, - expectedStatus: http.StatusOK, + body: &models.UserAuthResponse{ + ID: "000000000000000000000000", + Name: "john doe", + User: "john_doe", + Email: "john.doe@test.com", + Tenant: "00000000-0000-4000-0000-000000000000", + Token: "not-empty", + }, + status: http.StatusOK, }, }, { - title: "fails when try to swap a token", - requestBody: "", - requiredMocks: func() {}, + description: "success with tenant_id", + tenantID: "00000000-0000-4000-0000-000000000001", + headers: map[string]string{"X-ID": "000000000000000000000000"}, + mocks: func() { + svcMock. + On("CreateUserToken", gomock.Anything, &requests.CreateUserToken{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000001", + }). + Return(&models.UserAuthResponse{ + ID: "000000000000000000000000", + Name: "john doe", + User: "john_doe", + Email: "john.doe@test.com", + Tenant: "00000000-0000-4000-0000-000000000001", + Token: "not-empty", + }, nil). 
+ Once() + }, expected: Expected{ - expectedResponse: nil, - expectedStatus: http.StatusNotFound, + body: &models.UserAuthResponse{ + ID: "000000000000000000000000", + Name: "john doe", + User: "john_doe", + Email: "john.doe@test.com", + Tenant: "00000000-0000-4000-0000-000000000001", + Token: "not-empty", + }, + status: http.StatusOK, }, }, } for _, tc := range cases { - t.Run(tc.title, func(t *testing.T) { - tc.requiredMocks() - - req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("/api/auth/token/%s", tc.requestBody), nil) - req.Header.Set("Content-Type", "application/json") - rec := httptest.NewRecorder() + t.Run(tc.description, func(t *testing.T) { + tc.mocks() + + req := new(http.Request) + if tc.tenantID == "" { + req = httptest.NewRequest(http.MethodGet, "/api/auth/user", nil) + } else { + req = httptest.NewRequest(http.MethodGet, "/api/auth/token/"+tc.tenantID, nil) + } - e := NewRouter(mock) - c := gateway.NewContext(mock, e.NewContext(req, rec)) - c.Request().Header.Set("X-ID", "id") + for k, v := range tc.headers { + req.Header.Set(k, v) + } + rec := httptest.NewRecorder() + e := NewRouter(svcMock) e.ServeHTTP(rec, req) - assert.Equal(t, tc.expected.expectedStatus, rec.Result().StatusCode) - - if tc.expected.expectedResponse != nil { - var response models.UserAuthResponse - if err := json.NewDecoder(rec.Result().Body).Decode(&response); err != nil { - assert.ErrorIs(t, io.EOF, err) + body := new(models.UserAuthResponse) + if tc.expected.body != nil { + if err := json.NewDecoder(rec.Result().Body).Decode(&body); err != nil { + require.ErrorIs(t, io.EOF, err) } - - assert.Equal(t, tc.expected.expectedResponse, &response) } + + assert.Equal(t, tc.expected.body, body) + assert.Equal(t, tc.expected.status, rec.Result().StatusCode) }) } } @@ -483,76 +536,105 @@ func TestAuthPublicKey(t *testing.T) { } } -func TestAuthRequest(t *testing.T) { - mock := new(mocks.Service) +func TestHandler_AuthRequest_with_authorization_header(t *testing.T) { + type Expected 
struct { + status int + headers map[string]string + } + svcMock := new(mocks.Service) privateKey, err := rsa.GenerateKey(rand.Reader, 2048) - assert.NoError(t, err) - - token := jwt.NewWithClaims(jwt.SigningMethodRS256, models.UserAuthClaims{ - Username: "username", - Admin: true, - Tenant: "tenant", - Role: "role", - ID: "id", - AuthClaims: models.AuthClaims{ - Claims: "user", - }, - RegisteredClaims: jwt.RegisteredClaims{ - ExpiresAt: jwt.NewNumericDate(clock.Now().Add(time.Hour * 72)), - }, - }) + require.NoError(t, err) - type Expected struct { - expectedStatus int - } cases := []struct { - title string + description string + token func() (string, error) requiredMocks func() expected Expected }{ { - title: "success when trying to verify token authorization", + description: "failed when token is invalid", + token: func() (string, error) { + return "", nil + }, requiredMocks: func() { - mock.On("PublicKey").Return(&privateKey.PublicKey).Once() - mock.On("AuthIsCacheToken", gomock.Anything, "tenant", "id").Return(true, nil).Once() - mock.On("AuthMFA", gomock.Anything, "id").Return(false, nil).Once() + svcMock.On("PublicKey").Return(&privateKey.PublicKey).Once() }, expected: Expected{ - expectedStatus: http.StatusOK, + status: 401, + headers: map[string]string{}, }, }, { - title: "fails when token dont have cache", + description: "succeeds to authenticate a user", + token: func() (string, error) { + claims := authorizer.UserClaims{ + ID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: authorizer.RoleOwner, + Username: "john_doe", + } + + return jwttoken.EncodeUserClaims(claims, privateKey) + }, requiredMocks: func() { - mock.On("PublicKey").Return(&privateKey.PublicKey).Once() - mock.On("AuthIsCacheToken", gomock.Anything, "tenant", "id").Return(false, nil).Once() + svcMock.On("PublicKey").Return(&privateKey.PublicKey).Once() + svcMock.On("GetUserRole", gomock.Anything, "00000000-0000-4000-0000-000000000000", 
"000000000000000000000000").Return("owner", nil).Once() }, expected: Expected{ - expectedStatus: http.StatusUnauthorized, + status: 200, + headers: map[string]string{ + "X-ID": "000000000000000000000000", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": authorizer.RoleOwner.String(), + "X-Username": "john_doe", + }, + }, + }, + { + description: "succeeds to authenticate a device", + token: func() (string, error) { + claims := authorizer.DeviceClaims{ + UID: "0000000000000000000000000000000000000000000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + } + + return jwttoken.EncodeDeviceClaims(claims, privateKey) + }, + requiredMocks: func() { + svcMock.On("PublicKey").Return(&privateKey.PublicKey).Once() + }, + expected: Expected{ + status: 200, + headers: map[string]string{ + "X-Device-UID": "0000000000000000000000000000000000000000000000000000000000000000", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + }, }, }, } + for _, tc := range cases { - t.Run(tc.title, func(t *testing.T) { + t.Run(tc.description, func(t *testing.T) { tc.requiredMocks() req := httptest.NewRequest(http.MethodGet, "/internal/auth", nil) - req.Header.Set("Content-Type", "application/json") - - tokenStr, err := token.SignedString(privateKey) - assert.NoError(t, err) - req.Header.Add("Authorization", "Bearer "+tokenStr) + token, err := tc.token() + require.NoError(t, err) - req.Header.Set("X-Role", guard.RoleOwner) + req.Header.Set("Content-Type", "application/json") + req.Header.Set("Authorization", token) rec := httptest.NewRecorder() - e := NewRouter(mock) + e := NewRouter(svcMock) e.ServeHTTP(rec, req) - assert.Equal(t, tc.expected.expectedStatus, rec.Result().StatusCode) + require.Equal(t, tc.expected.status, rec.Result().StatusCode) + for k, v := range tc.expected.headers { + require.Equal(t, rec.Result().Header.Get(k), v) + } }) } } diff --git a/api/routes/device.go b/api/routes/device.go index 6e2f394971e..4b346326aee 100644 --- 
a/api/routes/device.go +++ b/api/routes/device.go @@ -1,79 +1,98 @@ package routes import ( - "encoding/base64" - "encoding/json" "net/http" "strconv" "github.com/shellhub-io/shellhub/api/pkg/gateway" - "github.com/shellhub-io/shellhub/api/pkg/guard" - "github.com/shellhub-io/shellhub/pkg/api/paginator" + "github.com/shellhub-io/shellhub/pkg/api/query" "github.com/shellhub-io/shellhub/pkg/api/requests" "github.com/shellhub-io/shellhub/pkg/models" ) const ( - GetDeviceListURL = "/devices" - GetDeviceURL = "/devices/:uid" - GetDeviceByPublicURLAddress = "/devices/public/:address" - DeleteDeviceURL = "/devices/:uid" - RenameDeviceURL = "/devices/:uid" - OfflineDeviceURL = "/devices/:uid/offline" - HeartbeatDeviceURL = "/devices/:uid/heartbeat" - LookupDeviceURL = "/lookup" - UpdateDeviceStatusURL = "/devices/:uid/:status" - CreateTagURL = "/devices/:uid/tags" // Add a tag to a device. - UpdateTagURL = "/devices/:uid/tags" // Update device's tags with a new set. - RemoveTagURL = "/devices/:uid/tags/:tag" // Delete a tag from a device. 
- UpdateDevice = "/devices/:uid" + GetDeviceListURL = "/devices" + GetDeviceURL = "/devices/:uid" + ResolveDeviceURL = "/devices/resolve" + DeleteDeviceURL = "/devices/:uid" + RenameDeviceURL = "/devices/:uid" + OfflineDeviceURL = "/devices/:uid/offline" + LookupDeviceURL = "/device/lookup" + UpdateDeviceStatusURL = "/devices/:uid/:status" + UpdateDevice = "/devices/:uid" ) const ( ParamDeviceID = "uid" ParamDeviceStatus = "status" - ParamTagName = "name" ) -type filterQuery struct { - Filter string `query:"filter"` - Status models.DeviceStatus `query:"status"` - SortBy string `query:"sort_by"` - OrderBy string `query:"order_by"` - paginator.Query -} - func (h *Handler) GetDeviceList(c gateway.Context) error { - query := filterQuery{} - if err := c.Bind(&query); err != nil { + req := new(requests.DeviceList) + + if err := c.Bind(req); err != nil { return err } - query.Normalize() + req.Paginator.Normalize() + req.Sorter.Normalize() - raw, err := base64.StdEncoding.DecodeString(query.Filter) - if err != nil { + if err := req.Filters.Unmarshal(); err != nil { return err } - var filter []models.Filter - if err := json.Unmarshal(raw, &filter); len(raw) > 0 && err != nil { - return err - } + if c.QueryParam("connector") != "" { + filter := []query.Filter{ + { + Type: query.FilterTypeProperty, + Params: &query.FilterProperty{ + Name: "info.platform", + Operator: "eq", + Value: "connector", + }, + }, + { + Type: query.FilterTypeOperator, + Params: &query.FilterOperator{ + Name: "and", + }, + }, + } - var tenant string - if c.Tenant() != nil { - tenant = c.Tenant().ID + req.Filters.Data = append(req.Filters.Data, filter...) + } else { + filter := []query.Filter{ + { + Type: query.FilterTypeProperty, + Params: &query.FilterProperty{ + Name: "info.platform", + Operator: "ne", + Value: "connector", + }, + }, + { + Type: query.FilterTypeOperator, + Params: &query.FilterOperator{ + Name: "and", + }, + }, + } + + req.Filters.Data = append(req.Filters.Data, filter...) 
} - devices, count, err := h.service.ListDevices(c.Ctx(), tenant, query.Query, filter, query.Status, query.SortBy, query.OrderBy) - if err != nil { + if err := c.Validate(req); err != nil { return err } + res, count, err := h.service.ListDevices(c.Ctx(), req) c.Response().Header().Set("X-Total-Count", strconv.Itoa(count)) - return c.JSON(http.StatusOK, devices) + if err != nil { + return err + } + + return c.JSON(http.StatusOK, res) } func (h *Handler) GetDevice(c gateway.Context) error { @@ -94,8 +113,8 @@ func (h *Handler) GetDevice(c gateway.Context) error { return c.JSON(http.StatusOK, device) } -func (h *Handler) GetDeviceByPublicURLAddress(c gateway.Context) error { - var req requests.DevicePublicURLAddress +func (h *Handler) ResolveDevice(c gateway.Context) error { + var req requests.ResolveDevice if err := c.Bind(&req); err != nil { return err } @@ -104,12 +123,12 @@ func (h *Handler) GetDeviceByPublicURLAddress(c gateway.Context) error { return err } - url, err := h.service.GetDeviceByPublicURLAddress(c.Ctx(), req.PublicURLAddress) + device, err := h.service.ResolveDevice(c.Ctx(), &req) if err != nil { return err } - return c.JSON(http.StatusOK, url) + return c.JSON(http.StatusOK, device) } func (h *Handler) DeleteDevice(c gateway.Context) error { @@ -127,12 +146,7 @@ func (h *Handler) DeleteDevice(c gateway.Context) error { tenant = c.Tenant().ID } - err := guard.EvaluatePermission(c.Role(), guard.Actions.Device.Remove, func() error { - err := h.service.DeleteDevice(c.Ctx(), models.UID(req.UID), tenant) - - return err - }) - if err != nil { + if err := h.service.DeleteDevice(c.Ctx(), models.UID(req.UID), tenant); err != nil { return err } @@ -154,12 +168,7 @@ func (h *Handler) RenameDevice(c gateway.Context) error { tenant = c.Tenant().ID } - err := guard.EvaluatePermission(c.Role(), guard.Actions.Device.Rename, func() error { - err := h.service.RenameDevice(c.Ctx(), models.UID(req.UID), req.Name, tenant) - - return err - }) - if err != nil { + if err := 
h.service.RenameDevice(c.Ctx(), models.UID(req.UID), req.Name, tenant); err != nil { return err } @@ -176,7 +185,7 @@ func (h *Handler) OfflineDevice(c gateway.Context) error { return err } - if err := h.service.OffineDevice(c.Ctx(), models.UID(req.UID), false); err != nil { + if err := h.service.OfflineDevice(c.Ctx(), models.UID(req.UID)); err != nil { return err } @@ -193,7 +202,7 @@ func (h *Handler) LookupDevice(c gateway.Context) error { return err } - device, err := h.service.LookupDevice(c.Ctx(), req.Domain, req.Name) + device, err := h.service.LookupDevice(c.Ctx(), req.TenantID, req.Name) if err != nil { return err } @@ -202,105 +211,29 @@ func (h *Handler) LookupDevice(c gateway.Context) error { } func (h *Handler) UpdateDeviceStatus(c gateway.Context) error { - var req requests.DeviceUpdateStatus - if err := c.Bind(&req); err != nil { - return err - } - - if err := c.Validate(&req); err != nil { - return err - } - - var tenant string - if c.Tenant() != nil { - tenant = c.Tenant().ID - } - - status := map[string]models.DeviceStatus{ - "accept": models.DeviceStatusAccepted, - "reject": models.DeviceStatusRejected, - "pending": models.DeviceStatusPending, - "unused": models.DeviceStatusUnused, - } - err := guard.EvaluatePermission(c.Role(), guard.Actions.Device.Accept, func() error { - err := h.service.UpdateDeviceStatus(c.Ctx(), tenant, models.UID(req.UID), status[req.Status]) - - return err - }) - if err != nil { - return err - } - - return c.NoContent(http.StatusOK) -} - -func (h *Handler) HeartbeatDevice(c gateway.Context) error { - var req requests.DeviceHeartbeat - if err := c.Bind(&req); err != nil { - return err - } - - if err := c.Validate(&req); err != nil { - return err - } - - return h.service.DeviceHeartbeat(c.Ctx(), models.UID(req.UID)) -} - -func (h *Handler) CreateDeviceTag(c gateway.Context) error { - var req requests.DeviceCreateTag - if err := c.Bind(&req); err != nil { - return err - } - - if err := c.Validate(&req); err != nil { - return 
err - } - - err := guard.EvaluatePermission(c.Role(), guard.Actions.Device.CreateTag, func() error { - return h.service.CreateDeviceTag(c.Ctx(), models.UID(req.UID), req.Tag) - }) - if err != nil { - return err - } - - return c.NoContent(http.StatusOK) -} + req := new(requests.DeviceUpdateStatus) -func (h *Handler) RemoveDeviceTag(c gateway.Context) error { - var req requests.DeviceRemoveTag - if err := c.Bind(&req); err != nil { - return err - } - - if err := c.Validate(&req); err != nil { + if err := c.Bind(req); err != nil { return err } - err := guard.EvaluatePermission(c.Role(), guard.Actions.Device.RemoveTag, func() error { - return h.service.RemoveDeviceTag(c.Ctx(), models.UID(req.UID), req.Tag) - }) - if err != nil { - return err + // TODO: Remove this legacy status mapping in API v2. + // This mapping exists solely for backward compatibility with API consumers + // that were sending string values before the device status refactor. + status := map[string]string{ + "accept": string(models.DeviceStatusAccepted), + "reject": string(models.DeviceStatusRejected), + "pending": string(models.DeviceStatusPending), + "unused": string(models.DeviceStatusUnused), } - return c.NoContent(http.StatusOK) -} + req.Status = status[req.Status] -func (h *Handler) UpdateDeviceTag(c gateway.Context) error { - var req requests.DeviceUpdateTag - if err := c.Bind(&req); err != nil { + if err := c.Validate(req); err != nil { return err } - if err := c.Validate(&req); err != nil { - return err - } - - err := guard.EvaluatePermission(c.Role(), guard.Actions.Device.UpdateTag, func() error { - return h.service.UpdateDeviceTag(c.Ctx(), models.UID(req.UID), req.Tags) - }) - if err != nil { + if err := h.service.UpdateDeviceStatus(c.Ctx(), req); err != nil { return err } @@ -308,23 +241,17 @@ func (h *Handler) UpdateDeviceTag(c gateway.Context) error { } func (h *Handler) UpdateDevice(c gateway.Context) error { - var req requests.DeviceUpdate - if err := c.Bind(&req); err != nil { - 
return err - } + req := new(requests.DeviceUpdate) - if err := c.Validate(&req); err != nil { + if err := c.Bind(req); err != nil { return err } - var tenant string - if c.Tenant() != nil { - tenant = c.Tenant().ID + if err := c.Validate(req); err != nil { + return err } - if err := guard.EvaluatePermission(c.Role(), guard.Actions.Device.Update, func() error { - return h.service.UpdateDevice(c.Ctx(), tenant, models.UID(req.UID), req.Name, req.PublicURL) - }); err != nil { + if err := h.service.UpdateDevice(c.Ctx(), req); err != nil { return err } diff --git a/api/routes/device_test.go b/api/routes/device_test.go index 7ae421bbb20..39cb86bb8a4 100644 --- a/api/routes/device_test.go +++ b/api/routes/device_test.go @@ -1,24 +1,25 @@ package routes import ( - "encoding/base64" "encoding/json" "fmt" "io" "net/http" "net/http/httptest" + "net/url" + "strconv" "strings" "testing" svc "github.com/shellhub-io/shellhub/api/services" - - "github.com/shellhub-io/shellhub/api/pkg/guard" "github.com/shellhub-io/shellhub/api/services/mocks" - "github.com/shellhub-io/shellhub/pkg/api/paginator" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" + "github.com/shellhub-io/shellhub/pkg/api/query" "github.com/shellhub-io/shellhub/pkg/api/requests" "github.com/shellhub-io/shellhub/pkg/models" "github.com/stretchr/testify/assert" gomock "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" ) func TestGetDevice(t *testing.T) { @@ -73,7 +74,7 @@ func TestGetDevice(t *testing.T) { req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("/api/devices/%s", tc.uid), nil) req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) + req.Header.Set("X-Role", authorizer.RoleOwner.String()) rec := httptest.NewRecorder() e := NewRouter(mock) @@ -91,6 +92,89 @@ func TestGetDevice(t *testing.T) { } } +func TestResolveDevice(t *testing.T) { + mock := new(mocks.Service) + + type Expected struct { + device *models.Device + status int + } + + 
cases := []struct { + description string + hostname string + uid string + headers map[string]string + requiredMocks func() + expected Expected + }{ + { + description: "succeeds when resolver is uid", + hostname: "", + uid: "uid", + headers: map[string]string{ + "Content-Type": "application/json", + "X-Role": authorizer.RoleOwner.String(), + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + }, + requiredMocks: func() { + mock. + On("ResolveDevice", gomock.Anything, &requests.ResolveDevice{TenantID: "00000000-0000-4000-0000-000000000000", UID: "uid"}). + Return(&models.Device{}, nil). + Once() + }, + expected: Expected{ + device: &models.Device{}, + status: http.StatusOK, + }, + }, + { + description: "succeeds when resolver is hostname", + hostname: "hostname", + uid: "", + headers: map[string]string{ + "Content-Type": "application/json", + "X-Role": authorizer.RoleOwner.String(), + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + }, + requiredMocks: func() { + mock. + On("ResolveDevice", gomock.Anything, &requests.ResolveDevice{TenantID: "00000000-0000-4000-0000-000000000000", Hostname: "hostname"}). + Return(&models.Device{}, nil). 
+ Once() + }, + expected: Expected{ + device: &models.Device{}, + status: http.StatusOK, + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + tc.requiredMocks() + + req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("/api/devices/resolve?hostname=%s&uid=%s", tc.hostname, tc.uid), nil) + for k, v := range tc.headers { + req.Header.Set(k, v) + } + + rec := httptest.NewRecorder() + e := NewRouter(mock) + e.ServeHTTP(rec, req) + + assert.Equal(t, tc.expected.status, rec.Result().StatusCode) + + var session *models.Device + if err := json.NewDecoder(rec.Result().Body).Decode(&session); err != nil { + assert.ErrorIs(t, io.EOF, err) + } + + assert.Equal(t, tc.expected.device, session) + }) + } +} + func TestDeleteDevice(t *testing.T) { mock := new(mocks.Service) @@ -130,7 +214,7 @@ func TestDeleteDevice(t *testing.T) { req := httptest.NewRequest(http.MethodDelete, fmt.Sprintf("/api/devices/%s", tc.uid), nil) req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) + req.Header.Set("X-Role", authorizer.RoleOwner.String()) rec := httptest.NewRecorder() e := NewRouter(mock) @@ -157,7 +241,7 @@ func TestRenameDevice(t *testing.T) { DeviceParam: requests.DeviceParam{UID: ""}, }, tenant: "tenant-id", - requiredMocks: func(req requests.DeviceRename) {}, + requiredMocks: func(_ requests.DeviceRename) {}, expectedStatus: http.StatusNotFound, }, { @@ -197,7 +281,7 @@ func TestRenameDevice(t *testing.T) { req := httptest.NewRequest(http.MethodPatch, fmt.Sprintf("/api/devices/%s", tc.renamePayload.UID), strings.NewReader(string(jsonData))) req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) + req.Header.Set("X-Role", authorizer.RoleOwner.String()) req.Header.Set("X-Tenant-ID", tc.tenant) rec := httptest.NewRecorder() @@ -209,200 +293,90 @@ func TestRenameDevice(t *testing.T) { } } -func TestGetDeviceByPublicURLAddress(t *testing.T) { +func TestGetDeviceList(t 
*testing.T) { mock := new(mocks.Service) type Expected struct { - expectedSession *models.Device - expectedStatus int + devices []models.Device + status int } + cases := []struct { - title string - address string + description string + req *requests.DeviceList requiredMocks func() expected Expected }{ { - title: "fails when bind fails to validate uid", - address: "", - requiredMocks: func() {}, - expected: Expected{ - expectedSession: nil, - expectedStatus: http.StatusNotFound, + description: "fails when try to get a device list existing", + req: &requests.DeviceList{ + TenantID: "00000000-0000-4000-0000-000000000000", + DeviceStatus: models.DeviceStatus("online"), + Paginator: query.Paginator{Page: 1, PerPage: 10}, + Sorter: query.Sorter{By: "name", Order: "asc"}, + Filters: query.Filters{}, }, - }, - { - title: "fails when try to searching a device by the public URL address", - address: "exampleaddress", requiredMocks: func() { - mock.On("GetDeviceByPublicURLAddress", gomock.Anything, "exampleaddress").Return(nil, svc.ErrDeviceNotFound) + mock. + On("ListDevices", gomock.Anything, gomock.AnythingOfType("*requests.DeviceList")). + Return(nil, 0, svc.ErrDeviceNotFound). + Once() }, expected: Expected{ - expectedSession: nil, - expectedStatus: http.StatusNotFound, + devices: []models.Device{}, + status: http.StatusNotFound, }, }, { - title: "success when try to searching a device by the public URL address", - address: "example", + description: "fails when try to get a device list existing", + req: &requests.DeviceList{ + TenantID: "00000000-0000-4000-0000-000000000000", + DeviceStatus: models.DeviceStatus("online"), + Paginator: query.Paginator{Page: 1, PerPage: 10}, + Sorter: query.Sorter{By: "name", Order: "asc"}, + Filters: query.Filters{}, + }, requiredMocks: func() { - mock.On("GetDeviceByPublicURLAddress", gomock.Anything, "example").Return(&models.Device{}, nil) + mock. + On("ListDevices", gomock.Anything, gomock.AnythingOfType("*requests.DeviceList")). 
+ Return([]models.Device{}, 0, nil). + Once() }, expected: Expected{ - expectedSession: &models.Device{}, - expectedStatus: http.StatusOK, + devices: []models.Device{}, + status: http.StatusOK, }, }, } for _, tc := range cases { - t.Run(tc.title, func(t *testing.T) { + t.Run(tc.description, func(t *testing.T) { tc.requiredMocks() - req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("/internal/devices/public/%s", tc.address), nil) - req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) - rec := httptest.NewRecorder() - - e := NewRouter(mock) - e.ServeHTTP(rec, req) - - assert.Equal(t, tc.expected.expectedStatus, rec.Result().StatusCode) - - var session *models.Device - if err := json.NewDecoder(rec.Result().Body).Decode(&session); err != nil { - assert.ErrorIs(t, io.EOF, err) - } - - assert.Equal(t, tc.expected.expectedSession, session) - }) - } -} - -func TestGetDeviceList(t *testing.T) { - mock := new(mocks.Service) - - filter := []map[string]interface{}{ - { - "type": "property", - "params": map[string]interface{}{ - "name": "name", - "operator": "contains", - "value": "examplespace", - }, - }, - } - - jsonData, err := json.Marshal(filter) - if err != nil { - assert.NoError(t, err) - } - - filteb64 := base64.StdEncoding.EncodeToString(jsonData) - type Expected struct { - expectedSession []models.Device - expectedStatus int - } - cases := []struct { - title string - filter string - queryPayload filterQuery - tenant string - requiredMocks func(query filterQuery) - expected Expected - }{ - { - title: "fails when try to get a device list existing", - queryPayload: filterQuery{ - Filter: filteb64, - Status: models.DeviceStatus("online"), - SortBy: "name", - OrderBy: "asc", - Query: paginator.Query{ - Page: 1, - PerPage: 10, - }, - }, - tenant: "tenant-id", - requiredMocks: func(query filterQuery) { - query.Normalize() - raw, err := base64.StdEncoding.DecodeString(query.Filter) - if err != nil { - assert.NoError(t, err) - } 
- - var filters []models.Filter - if err := json.Unmarshal(raw, &filters); len(raw) > 0 && err != nil { - assert.NoError(t, err) - } - - mock.On("ListDevices", gomock.Anything, "tenant-id", query.Query, filters, query.Status, query.SortBy, query.OrderBy).Return(nil, 0, svc.ErrDeviceNotFound).Once() - }, - expected: Expected{ - expectedSession: nil, - expectedStatus: http.StatusNotFound, - }, - }, - { - title: "fails when try to get a device list existing", - queryPayload: filterQuery{ - Filter: filteb64, - Status: models.DeviceStatus("online"), - SortBy: "name", - OrderBy: "asc", - Query: paginator.Query{ - Page: 1, - PerPage: 10, - }, - }, - tenant: "tenant-id", - requiredMocks: func(query filterQuery) { - query.Normalize() - raw, err := base64.StdEncoding.DecodeString(query.Filter) - if err != nil { - assert.NoError(t, err) - } - - var filters []models.Filter - if err := json.Unmarshal(raw, &filters); len(raw) > 0 && err != nil { - assert.NoError(t, err) - } + urlVal := &url.Values{} + urlVal.Set("page", strconv.Itoa(tc.req.Page)) + urlVal.Set("per_page", strconv.Itoa(tc.req.PerPage)) + urlVal.Set("sort_by", tc.req.By) + urlVal.Set("order_by", tc.req.Order) + urlVal.Set("status", string(tc.req.DeviceStatus)) - mock.On("ListDevices", gomock.Anything, "tenant-id", query.Query, filters, query.Status, query.SortBy, query.OrderBy).Return([]models.Device{}, 1, nil).Once() - }, - expected: Expected{ - expectedSession: []models.Device{}, - expectedStatus: http.StatusOK, - }, - }, - } - - for _, tc := range cases { - t.Run(tc.title, func(t *testing.T) { - tc.requiredMocks(tc.queryPayload) - - jsonData, err := json.Marshal(tc.queryPayload) - if err != nil { - assert.NoError(t, err) - } + req := httptest.NewRequest(http.MethodGet, "/api/devices?"+urlVal.Encode(), nil) + req.Header.Set("X-Role", authorizer.RoleOwner.String()) + req.Header.Set("X-Tenant-ID", tc.req.TenantID) - req := httptest.NewRequest(http.MethodGet, "/api/devices", strings.NewReader(string(jsonData))) - 
req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) - req.Header.Set("X-Tenant-ID", tc.tenant) rec := httptest.NewRecorder() - e := NewRouter(mock) e.ServeHTTP(rec, req) - assert.Equal(t, tc.expected.expectedStatus, rec.Result().StatusCode) - - var session []models.Device - if err := json.NewDecoder(rec.Result().Body).Decode(&session); err != nil { - assert.ErrorIs(t, io.EOF, err) + devices := make([]models.Device, 0) + if len(tc.expected.devices) != 0 { + if err := json.NewDecoder(rec.Result().Body).Decode(&devices); err != nil { + require.ErrorIs(t, io.EOF, err) + } } - assert.Equal(t, tc.expected.expectedSession, session) + require.Equal(t, tc.expected.status, rec.Result().StatusCode) + require.Equal(t, tc.expected.devices, devices) }) } } @@ -426,7 +400,7 @@ func TestOfflineDevice(t *testing.T) { title: "fails when try to setting a non-existing device as offline", uid: "1234", requiredMocks: func() { - mock.On("OffineDevice", gomock.Anything, models.UID("1234"), false).Return(svc.ErrNotFound) + mock.On("OfflineDevice", gomock.Anything, models.UID("1234")).Return(svc.ErrNotFound) }, expectedStatus: http.StatusNotFound, }, @@ -434,7 +408,7 @@ func TestOfflineDevice(t *testing.T) { title: "success when try to setting an existing device as offline", uid: "123", requiredMocks: func() { - mock.On("OffineDevice", gomock.Anything, models.UID("123"), false).Return(nil) + mock.On("OfflineDevice", gomock.Anything, models.UID("123")).Return(nil) }, expectedStatus: http.StatusOK, }, @@ -446,7 +420,7 @@ func TestOfflineDevice(t *testing.T) { req := httptest.NewRequest(http.MethodPost, fmt.Sprintf("/internal/devices/%s/offline", tc.uid), nil) req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) + req.Header.Set("X-Role", authorizer.RoleOwner.String()) req.Header.Set("X-Tenant-ID", "tenant-id") rec := httptest.NewRecorder() @@ -472,12 +446,9 @@ func TestLookupDevice(t *testing.T) { expected 
Expected }{ { - title: "fails when bind fails to validate uid", - request: requests.DeviceLookup{ - Username: "user1", - IPAddress: "192.168.1.100", - }, - requiredMocks: func(req requests.DeviceLookup) {}, + title: "fails when bind fails to validate uid", + request: requests.DeviceLookup{}, + requiredMocks: func(_ requests.DeviceLookup) {}, expected: Expected{ expectedSession: nil, expectedStatus: http.StatusBadRequest, @@ -486,13 +457,11 @@ func TestLookupDevice(t *testing.T) { { title: "fails when try to look up of a existing device", request: requests.DeviceLookup{ - Domain: "example.com", - Name: "device1", - Username: "user1", - IPAddress: "192.168.1.100", + TenantID: "example.com", + Name: "device1", }, requiredMocks: func(req requests.DeviceLookup) { - mock.On("LookupDevice", gomock.Anything, req.Domain, req.Name).Return(nil, svc.ErrDeviceNotFound).Once() + mock.On("LookupDevice", gomock.Anything, req.TenantID, req.Name).Return(nil, svc.ErrDeviceNotFound).Once() }, expected: Expected{ expectedSession: nil, @@ -502,13 +471,11 @@ func TestLookupDevice(t *testing.T) { { title: "success when try to look up of a existing device", request: requests.DeviceLookup{ - Domain: "example.com", - Name: "device1", - Username: "user1", - IPAddress: "192.168.1.100", + TenantID: "example.com", + Name: "device1", }, requiredMocks: func(req requests.DeviceLookup) { - mock.On("LookupDevice", gomock.Anything, req.Domain, req.Name).Return(&models.Device{}, nil) + mock.On("LookupDevice", gomock.Anything, req.TenantID, req.Name).Return(&models.Device{}, nil) }, expected: Expected{ expectedSession: &models.Device{}, @@ -526,9 +493,9 @@ func TestLookupDevice(t *testing.T) { assert.NoError(t, err) } - req := httptest.NewRequest(http.MethodGet, "/internal/lookup", strings.NewReader(string(jsonData))) + req := httptest.NewRequest(http.MethodGet, "/internal/device/lookup", strings.NewReader(string(jsonData))) req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", 
guard.RoleOwner) + req.Header.Set("X-Role", authorizer.RoleOwner.String()) rec := httptest.NewRecorder() e := NewRouter(mock) @@ -546,451 +513,58 @@ func TestLookupDevice(t *testing.T) { } } -func TestHeartbeatDevice(t *testing.T) { +func TestUpdateDevice(t *testing.T) { mock := new(mocks.Service) cases := []struct { - title string - uid string + description string + req requests.DeviceUpdate requiredMocks func() expectedStatus int }{ { - title: "fails when bind fails to validate uid", - uid: "", - requiredMocks: func() {}, - expectedStatus: http.StatusBadRequest, - }, - { - title: "fails when try to heartbeat non-existing device", - uid: "1234", - requiredMocks: func() { - mock.On("DeviceHeartbeat", gomock.Anything, models.UID("1234")).Return(svc.ErrNotFound).Once() + description: "fails when try to update a existing device", + req: requests.DeviceUpdate{ + TenantID: "00000000-0000-4000-0000-000000000000", + UID: "1234", + Name: "name", }, - expectedStatus: http.StatusNotFound, - }, - { - title: "success when try to heartbeat of a existing device", - uid: "123", requiredMocks: func() { - mock.On("DeviceHeartbeat", gomock.Anything, models.UID("123")).Return(nil).Once() - }, - expectedStatus: http.StatusOK, - }, - } - - for _, tc := range cases { - t.Run(tc.title, func(t *testing.T) { - tc.requiredMocks() - - req := httptest.NewRequest(http.MethodPost, fmt.Sprintf("/internal/devices/%s/heartbeat", tc.uid), nil) - req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) - req.Header.Set("X-Tenant-ID", "tenant-id") - rec := httptest.NewRecorder() - - e := NewRouter(mock) - e.ServeHTTP(rec, req) - - assert.Equal(t, tc.expectedStatus, rec.Result().StatusCode) - }) - } -} - -func TestRemoveDeviceTag(t *testing.T) { - mock := new(mocks.Service) - - cases := []struct { - title string - updatePayload requests.DeviceRemoveTag - requiredMocks func(req requests.DeviceRemoveTag) - expectedStatus int - }{ - { - title: "fails when bind fails 
to validate uid", - updatePayload: requests.DeviceRemoveTag{ - DeviceParam: requests.DeviceParam{UID: ""}, - TagBody: requests.TagBody{Tag: "tag"}, - }, - requiredMocks: func(req requests.DeviceRemoveTag) {}, - expectedStatus: http.StatusBadRequest, - }, - { - title: "fails when validate because the tag does not have a min of 3 characters", - updatePayload: requests.DeviceRemoveTag{ - TagBody: requests.TagBody{Tag: "tg"}, - }, - expectedStatus: http.StatusBadRequest, - requiredMocks: func(req requests.DeviceRemoveTag) {}, - }, - { - title: "fails when validate because the tag does not have a max of 255 characters", - updatePayload: requests.DeviceRemoveTag{ - TagBody: requests.TagBody{Tag: "BCD3821E12F7A6D89295D86E277F2C365D7A4C3FCCD75D8A2F46C0A556A8EBAAF0845C85D50241FC2F9806D8668FF75D262FDA0A055784AD36D8CA7D2BB600C9BCD3821E12F7A6D89295D86E277F2C365D7A4C3FCCD75D8A2F46C0A556A8EBAAF0845C85D50241FC2F9806D8668FF75D262FDA0A055784AD36D8CA7D2BB600C9"}, - }, - expectedStatus: http.StatusBadRequest, - requiredMocks: func(req requests.DeviceRemoveTag) {}, - }, - { - title: "fails when validate because have a '/' with in your characters", - updatePayload: requests.DeviceRemoveTag{ - TagBody: requests.TagBody{Tag: "test/"}, - }, - expectedStatus: http.StatusBadRequest, - requiredMocks: func(req requests.DeviceRemoveTag) {}, - }, - { - title: "fails when validate because have a '&' with in your characters", - updatePayload: requests.DeviceRemoveTag{ - TagBody: requests.TagBody{Tag: "test&"}, - }, - expectedStatus: http.StatusBadRequest, - requiredMocks: func(req requests.DeviceRemoveTag) {}, - }, - { - title: "fails when validate because have a '@' with in your characters", - updatePayload: requests.DeviceRemoveTag{ - TagBody: requests.TagBody{Tag: "test@"}, - }, - expectedStatus: http.StatusBadRequest, - requiredMocks: func(req requests.DeviceRemoveTag) {}, - }, - { - title: "fails when try to remove a non-existing device tag", - updatePayload: requests.DeviceRemoveTag{ - 
DeviceParam: requests.DeviceParam{UID: "1234"}, - TagBody: requests.TagBody{Tag: "tag"}, - }, - requiredMocks: func(req requests.DeviceRemoveTag) { - mock.On("RemoveDeviceTag", gomock.Anything, models.UID("1234"), req.Tag).Return(svc.ErrNotFound) - }, - expectedStatus: http.StatusNotFound, - }, - { - title: "success when try to remove a existing device tag", - updatePayload: requests.DeviceRemoveTag{ - DeviceParam: requests.DeviceParam{UID: "123"}, - TagBody: requests.TagBody{Tag: "tag"}, - }, - - requiredMocks: func(req requests.DeviceRemoveTag) { - mock.On("RemoveDeviceTag", gomock.Anything, models.UID("123"), req.Tag).Return(nil) - }, - expectedStatus: http.StatusOK, - }, - } - - for _, tc := range cases { - t.Run(tc.title, func(t *testing.T) { - tc.requiredMocks(tc.updatePayload) - - jsonData, err := json.Marshal(tc.updatePayload) - if err != nil { - assert.NoError(t, err) - } - - req := httptest.NewRequest(http.MethodDelete, fmt.Sprintf("/api/devices/%s/tags/%s", tc.updatePayload.UID, tc.updatePayload.Tag), strings.NewReader(string(jsonData))) - req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) - req.Header.Set("X-Tenant-ID", "tenant-id") - rec := httptest.NewRecorder() - - e := NewRouter(mock) - e.ServeHTTP(rec, req) - - assert.Equal(t, tc.expectedStatus, rec.Result().StatusCode) - }) - } -} - -func TestCreateDeviceTag(t *testing.T) { - mock := new(mocks.Service) - - cases := []struct { - title string - updatePayload requests.DeviceCreateTag - requiredMocks func(req requests.DeviceCreateTag) - expectedStatus int - }{ - { - title: "fails when bind fails to validate uid", - updatePayload: requests.DeviceCreateTag{ - DeviceParam: requests.DeviceParam{UID: ""}, - TagBody: requests.TagBody{Tag: "tag"}, - }, - requiredMocks: func(req requests.DeviceCreateTag) { - }, - expectedStatus: http.StatusBadRequest, - }, - { - title: "fails when validate because the tag does not have a min of 3 characters", - updatePayload: 
requests.DeviceCreateTag{ - DeviceParam: requests.DeviceParam{UID: "1234"}, - TagBody: requests.TagBody{Tag: "tg"}, - }, - requiredMocks: func(req requests.DeviceCreateTag) { - }, - expectedStatus: http.StatusBadRequest, - }, - { - title: "fails when validate because the tag does not have a max of 255 characters", - updatePayload: requests.DeviceCreateTag{ - DeviceParam: requests.DeviceParam{UID: "1234"}, - TagBody: requests.TagBody{Tag: "BCD3821E12F7A6D89295D86E277F2C365D7A4C3FCCD75D8A2F46C0A556A8EBAAF0845C85D50241FC2F9806D8668FF75D262FDA0A055784AD36D8CA7D2BB600C9BCD3821E12F7A6D89295D86E277F2C365D7A4C3FCCD75D8A2F46C0A556A8EBAAF0845C85D50241FC2F9806D8668FF75D262FDA0A055784AD36D8CA7D2BB600C9"}, - }, - requiredMocks: func(req requests.DeviceCreateTag) { - }, - expectedStatus: http.StatusBadRequest, - }, - { - title: "fails when validate because have a '@' with in your characters", - updatePayload: requests.DeviceCreateTag{ - DeviceParam: requests.DeviceParam{UID: "1234"}, - TagBody: requests.TagBody{Tag: "test@"}, - }, - requiredMocks: func(req requests.DeviceCreateTag) { - }, - expectedStatus: http.StatusBadRequest, - }, - { - title: "fails when validate because have a '/' with in your characters", - updatePayload: requests.DeviceCreateTag{ - DeviceParam: requests.DeviceParam{UID: "1234"}, - TagBody: requests.TagBody{Tag: "test/"}, - }, - requiredMocks: func(req requests.DeviceCreateTag) { - }, - expectedStatus: http.StatusBadRequest, - }, - { - title: "fails when validate because have a '&' with in your characters", - updatePayload: requests.DeviceCreateTag{ - DeviceParam: requests.DeviceParam{UID: "1234"}, - TagBody: requests.TagBody{Tag: "test&"}, - }, - requiredMocks: func(req requests.DeviceCreateTag) { - }, - expectedStatus: http.StatusBadRequest, - }, - { - title: "fails when try to create a non-existing device tag", - updatePayload: requests.DeviceCreateTag{ - DeviceParam: requests.DeviceParam{UID: "1234"}, - TagBody: requests.TagBody{Tag: "tag"}, - }, - 
requiredMocks: func(req requests.DeviceCreateTag) { - mock.On("CreateDeviceTag", gomock.Anything, models.UID("1234"), req.Tag).Return(svc.ErrNotFound) + mock.On("UpdateDevice", gomock.Anything, &requests.DeviceUpdate{TenantID: "00000000-0000-4000-0000-000000000000", UID: "1234", Name: "name"}). + Return(svc.ErrNotFound). + Once() }, expectedStatus: http.StatusNotFound, }, { - title: "fails when try to create a existing device tag", - updatePayload: requests.DeviceCreateTag{ - DeviceParam: requests.DeviceParam{UID: "123"}, - TagBody: requests.TagBody{Tag: "tag"}, + description: "success when try to update a existing device", + req: requests.DeviceUpdate{ + TenantID: "00000000-0000-4000-0000-000000000000", + UID: "1234", + Name: "name", }, - - requiredMocks: func(req requests.DeviceCreateTag) { - mock.On("CreateDeviceTag", gomock.Anything, models.UID("123"), req.Tag).Return(nil) - }, - expectedStatus: http.StatusOK, - }, - } - - for _, tc := range cases { - t.Run(tc.title, func(t *testing.T) { - tc.requiredMocks(tc.updatePayload) - - jsonData, err := json.Marshal(tc.updatePayload) - if err != nil { - assert.NoError(t, err) - } - - req := httptest.NewRequest(http.MethodPost, fmt.Sprintf("/api/devices/%s/tags", tc.updatePayload.UID), strings.NewReader(string(jsonData))) - req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) - req.Header.Set("X-Tenant-ID", "tenant-id") - rec := httptest.NewRecorder() - - e := NewRouter(mock) - e.ServeHTTP(rec, req) - - assert.Equal(t, tc.expectedStatus, rec.Result().StatusCode) - }) - } -} - -func TestUpdateDeviceTag(t *testing.T) { - mock := new(mocks.Service) - - cases := []struct { - title string - updatePayload requests.DeviceUpdateTag - requiredMocks func(req requests.DeviceUpdateTag) - expectedStatus int - }{ - { - title: "fails when bind fails to validate uid", - updatePayload: requests.DeviceUpdateTag{ - DeviceParam: requests.DeviceParam{UID: ""}, - Tags: []string{"tag1", "tag2"}, - }, - 
requiredMocks: func(req requests.DeviceUpdateTag) {}, - expectedStatus: http.StatusBadRequest, - }, - { - title: "fails when validate because have a duplicate tag", - updatePayload: requests.DeviceUpdateTag{ - DeviceParam: requests.DeviceParam{UID: "1234"}, - Tags: []string{"tagduplicated", "tagduplicated"}, - }, - requiredMocks: func(req requests.DeviceUpdateTag) {}, - expectedStatus: http.StatusBadRequest, - }, - { - title: "fails when validate because have a '@' with in your characters", - updatePayload: requests.DeviceUpdateTag{ - DeviceParam: requests.DeviceParam{UID: "1234"}, - Tags: []string{"test@"}, - }, - requiredMocks: func(req requests.DeviceUpdateTag) {}, - expectedStatus: http.StatusBadRequest, - }, - { - title: "fails when validate because have a '/' with in your characters", - updatePayload: requests.DeviceUpdateTag{ - DeviceParam: requests.DeviceParam{UID: "1234"}, - Tags: []string{"test/"}, - }, - requiredMocks: func(req requests.DeviceUpdateTag) {}, - expectedStatus: http.StatusBadRequest, - }, - { - title: "fails when validate because have a '&' with in your characters", - updatePayload: requests.DeviceUpdateTag{ - DeviceParam: requests.DeviceParam{UID: "1234"}, - Tags: []string{"test&"}, - }, - requiredMocks: func(req requests.DeviceUpdateTag) {}, - expectedStatus: http.StatusBadRequest, - }, - { - title: "fails when validate because the tag does not have a min of 3 characters", - updatePayload: requests.DeviceUpdateTag{ - DeviceParam: requests.DeviceParam{UID: "1234"}, - Tags: []string{"tg"}, - }, - requiredMocks: func(req requests.DeviceUpdateTag) {}, - expectedStatus: http.StatusBadRequest, - }, - { - title: "fails when validate because the tag does not have a max of 255 characters", - updatePayload: requests.DeviceUpdateTag{ - DeviceParam: requests.DeviceParam{UID: "1234"}, - Tags: 
[]string{"BCD3821E12F7A6D89295D86E277F2C365D7A4C3FCCD75D8A2F46C0A556A8EBAAF0845C85D50241FC2F9806D8668FF75D262FDA0A055784AD36D8CA7D2BB600C9BCD3821E12F7A6D89295D86E277F2C365D7A4C3FCCD75D8A2F46C0A556A8EBAAF0845C85D50241FC2F9806D8668FF75D262FDA0A055784AD36D8CA7D2BB600C9"}, - }, - requiredMocks: func(req requests.DeviceUpdateTag) {}, - expectedStatus: http.StatusBadRequest, - }, - { - title: "fails when try to update a existing device tag", - updatePayload: requests.DeviceUpdateTag{ - DeviceParam: requests.DeviceParam{UID: "1234"}, - Tags: []string{"tag1", "tag2"}, - }, - requiredMocks: func(req requests.DeviceUpdateTag) { - mock.On("UpdateDeviceTag", gomock.Anything, models.UID("1234"), req.Tags).Return(svc.ErrNotFound) - }, - expectedStatus: http.StatusNotFound, - }, - { - title: "success when try to update a existing device tag", - updatePayload: requests.DeviceUpdateTag{ - DeviceParam: requests.DeviceParam{UID: "123"}, - Tags: []string{"tag1", "tag2"}, - }, - - requiredMocks: func(req requests.DeviceUpdateTag) { - mock.On("UpdateDeviceTag", gomock.Anything, models.UID("123"), req.Tags).Return(nil) - }, - expectedStatus: http.StatusOK, - }, - } - - for _, tc := range cases { - t.Run(tc.title, func(t *testing.T) { - tc.requiredMocks(tc.updatePayload) - - jsonData, err := json.Marshal(tc.updatePayload) - if err != nil { - assert.NoError(t, err) - } - - req := httptest.NewRequest(http.MethodPut, fmt.Sprintf("/api/devices/%s/tags", tc.updatePayload.UID), strings.NewReader(string(jsonData))) - req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) - req.Header.Set("X-Tenant-ID", "tenant-id") - rec := httptest.NewRecorder() - - e := NewRouter(mock) - e.ServeHTTP(rec, req) - - assert.Equal(t, tc.expectedStatus, rec.Result().StatusCode) - }) - } -} - -func TestUpdateDevice(t *testing.T) { - mock := new(mocks.Service) - name := "new device name" - url := true - - cases := []struct { - title string - updatePayload requests.DeviceUpdate - 
requiredMocks func(req requests.DeviceUpdate) - expectedStatus int - }{ - { - title: "fails when try to uodate a existing device", - updatePayload: requests.DeviceUpdate{ - DeviceParam: requests.DeviceParam{UID: "1234"}, - Name: &name, - PublicURL: &url, - }, - requiredMocks: func(req requests.DeviceUpdate) { - mock.On("UpdateDevice", gomock.Anything, "tenant-id", models.UID("1234"), req.Name, req.PublicURL).Return(svc.ErrNotFound) - }, - expectedStatus: http.StatusNotFound, - }, - { - title: "success when try to update a existing device", - updatePayload: requests.DeviceUpdate{ - DeviceParam: requests.DeviceParam{UID: "123"}, - Name: &name, - PublicURL: &url, - }, - - requiredMocks: func(req requests.DeviceUpdate) { - mock.On("UpdateDevice", gomock.Anything, "tenant-id", models.UID("123"), req.Name, req.PublicURL).Return(nil) + requiredMocks: func() { + mock.On("UpdateDevice", gomock.Anything, &requests.DeviceUpdate{TenantID: "00000000-0000-4000-0000-000000000000", UID: "1234", Name: "name"}). + Return(nil). 
+ Once() }, expectedStatus: http.StatusOK, }, } for _, tc := range cases { - t.Run(tc.title, func(t *testing.T) { - tc.requiredMocks(tc.updatePayload) + t.Run(tc.description, func(t *testing.T) { + tc.requiredMocks() - jsonData, err := json.Marshal(tc.updatePayload) + jsonData, err := json.Marshal(tc.req) if err != nil { assert.NoError(t, err) } - req := httptest.NewRequest(http.MethodPut, fmt.Sprintf("/api/devices/%s", tc.updatePayload.UID), strings.NewReader(string(jsonData))) + req := httptest.NewRequest(http.MethodPut, fmt.Sprintf("/api/devices/%s", tc.req.UID), strings.NewReader(string(jsonData))) req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) - req.Header.Set("X-Tenant-ID", "tenant-id") + req.Header.Set("X-Role", authorizer.RoleOwner.String()) + req.Header.Set("X-Tenant-ID", "00000000-0000-4000-0000-000000000000") rec := httptest.NewRecorder() e := NewRouter(mock) diff --git a/api/routes/handler.go b/api/routes/handler.go index a1a2598f1ef..9e00007d20e 100644 --- a/api/routes/handler.go +++ b/api/routes/handler.go @@ -2,12 +2,18 @@ package routes import ( svc "github.com/shellhub-io/shellhub/api/services" + "github.com/shellhub-io/shellhub/pkg/websocket" ) type Handler struct { service svc.Service + // WebSocketUpgrader is used to turns a HTTP request into WebSocketUpgrader connection. 
+ WebSocketUpgrader websocket.Upgrader } -func NewHandler(s svc.Service) *Handler { - return &Handler{service: s} +func NewHandler(s svc.Service, w websocket.Upgrader) *Handler { + return &Handler{ + service: s, + WebSocketUpgrader: w, + } } diff --git a/api/routes/healthcheck_test.go b/api/routes/healthcheck_test.go index 874faad416c..c857c4259a5 100644 --- a/api/routes/healthcheck_test.go +++ b/api/routes/healthcheck_test.go @@ -14,7 +14,7 @@ import ( func TestEvaluateHealth(t *testing.T) { e := echo.New() mock := new(mocks.Service) - h := NewHandler(mock) + h := NewHandler(mock, nil) cases := []struct { title string diff --git a/api/routes/middleware/authorize.go b/api/routes/middleware/authorize.go index 872f52462d1..52e50fd5346 100644 --- a/api/routes/middleware/authorize.go +++ b/api/routes/middleware/authorize.go @@ -6,6 +6,7 @@ import ( "github.com/labstack/echo/v4" "github.com/shellhub-io/shellhub/api/pkg/gateway" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" ) func Authorize(next echo.HandlerFunc) echo.HandlerFunc { @@ -14,10 +15,39 @@ func Authorize(next echo.HandlerFunc) echo.HandlerFunc { id := gateway.IDFromContext(ctx) tenant := gateway.TenantFromContext(ctx) - if id != nil && tenant == nil { + gCtx := c.(*gateway.Context) + + // Allow admins to access resources without tenant scope (e.g., from /admin/api endpoints) + if id != nil && tenant == nil && !gCtx.IsAdmin() { + return c.NoContent(http.StatusForbidden) + } + + return next(c) + } +} + +// BlockAPIKey blocks request using API keys to continue. +func BlockAPIKey(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + if key := c.Request().Header.Get("X-API-Key"); key != "" { return c.NoContent(http.StatusForbidden) } return next(c) } } + +// RequiresPermission reports whether the client has the specified permission. +// If not, it returns an [http.StatusForbidden] response. Otherwise, it executes +// the next handler. 
+func RequiresPermission(permission authorizer.Permission) echo.MiddlewareFunc { + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + if ctx, ok := c.(*gateway.Context); !ok || !ctx.Role().HasPermission(permission) { + return c.NoContent(http.StatusForbidden) + } + + return next(c) + } + } +} diff --git a/api/routes/middleware/openapi.go b/api/routes/middleware/openapi.go new file mode 100644 index 00000000000..7fd5228eb65 --- /dev/null +++ b/api/routes/middleware/openapi.go @@ -0,0 +1,142 @@ +package middleware + +import ( + "bytes" + "context" + "io" + "net/http" + "net/url" + "sync" + + "github.com/labstack/echo/v4" + "github.com/shellhub-io/shellhub/api/pkg/openapi" + "github.com/sirupsen/logrus" +) + +var ( + // Global validator instance + globalValidator *openapi.OpenAPIValidator + validatorOnce sync.Once + validatorErr error +) + +type capture struct { + http.ResponseWriter + body *bytes.Buffer + statusCode int +} + +func (rw *capture) Write(b []byte) (int, error) { + rw.body.Write(b) + + return rw.ResponseWriter.Write(b) +} + +func (rw *capture) WriteHeader(statusCode int) { + rw.statusCode = statusCode + rw.ResponseWriter.WriteHeader(statusCode) +} + +// OpenAPIValidatorConfig holds the configuration for schema validation middleware +type OpenAPIValidatorConfig struct { + // EnabledPaths specifies which paths to validate (nil = all paths) + EnabledPaths []string + // FailOnMismatch determines if validation failures should return HTTP errors + FailOnMismatch bool + // SchemaPath overrides the default schema path + SchemaPath *url.URL + // Skipper defines a function to skip middleware. If Skipper returns true, middleware is skipped. 
+ Skipper func(echo.Context) bool +} + +type OpenAPIValidationMessage struct { + Message string `json:"message"` + Errors []string `json:"errors"` +} + +// OpenAPIValidator returns a middleware that validates API responses against OpenAPIValidator schema +func OpenAPIValidator(cfg *OpenAPIValidatorConfig) echo.MiddlewareFunc { + if cfg == nil { + cfg = &OpenAPIValidatorConfig{} + } + + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + if cfg.Skipper != nil && cfg.Skipper(c) { + return next(c) + } + + validator := getOrCreateValidator(*cfg) + if validator == nil { + return next(c) + } + + req := c.Request() + res := c.Response() + + body := &bytes.Buffer{} + + rw := &capture{ + ResponseWriter: res.Writer, + body: body, + statusCode: 200, + } + + res.Writer = rw + + err := next(c) + + response := &http.Response{ + StatusCode: rw.statusCode, + Header: res.Header(), + Body: io.NopCloser(bytes.NewReader(body.Bytes())), + ContentLength: int64(body.Len()), + } + result := validator.ValidateResponse(req, response, body.Bytes()) + + logger := logrus.WithFields(logrus.Fields{ + "path": result.Path, + "method": result.Method, + "status_code": result.StatusCode, + "valid": result.Valid, + }) + + if result.Valid { + logger.Debug("OpenAPI response validation passed") + } else { + logger.WithField("error", result.Error).Warn("OpenAPI response validation failed") + } + + return err + } + } +} + +// getOrCreateValidator initializes or returns the global validator instance +func getOrCreateValidator(cfg OpenAPIValidatorConfig) *openapi.OpenAPIValidator { + validatorOnce.Do(func() { + logger := logrus.WithField("component", "openapi_validator") + + validatorConfig := &openapi.OpenAPIValidatorConfig{ + SchemaPath: cfg.SchemaPath, + EnabledPaths: cfg.EnabledPaths, + FailOnMismatch: cfg.FailOnMismatch, + Logger: logger, + } + + ctx := context.Background() + + globalValidator, validatorErr = openapi.NewOpenAPIValidator(ctx, validatorConfig) + 
if validatorErr != nil { + logger.WithError(validatorErr).Error("Failed to initialize OpenAPI validator") + + return + } + }) + + if validatorErr != nil { + return nil + } + + return globalValidator +} diff --git a/api/routes/nsadm.go b/api/routes/nsadm.go index 1f59cc11a33..79a6cbe3376 100644 --- a/api/routes/nsadm.go +++ b/api/routes/nsadm.go @@ -1,15 +1,11 @@ package routes import ( - "encoding/base64" - "encoding/json" "net/http" "strconv" "github.com/shellhub-io/shellhub/api/pkg/gateway" - "github.com/shellhub-io/shellhub/api/pkg/guard" "github.com/shellhub-io/shellhub/pkg/api/requests" - "github.com/shellhub-io/shellhub/pkg/models" ) const ( @@ -18,9 +14,10 @@ const ( GetNamespaceURL = "/namespaces/:tenant" DeleteNamespaceURL = "/namespaces/:tenant" EditNamespaceURL = "/namespaces/:tenant" - AddNamespaceUserURL = "/namespaces/:tenant/members" - RemoveNamespaceUserURL = "/namespaces/:tenant/members/:uid" - EditNamespaceUserURL = "/namespaces/:tenant/members/:uid" + LeaveNamespaceURL = "/namespaces/:tenant/members" + AddNamespaceMemberURL = "/namespaces/:tenant/members" + RemoveNamespaceMemberURL = "/namespaces/:tenant/members/:uid" + EditNamespaceMemberURL = "/namespaces/:tenant/members/:uid" GetSessionRecordURL = "/users/security" EditSessionRecordStatusURL = "/users/security/:tenant" ) @@ -31,22 +28,22 @@ const ( ) func (h *Handler) GetNamespaceList(c gateway.Context) error { - query := filterQuery{} - if err := c.Bind(&query); err != nil { + req := new(requests.NamespaceList) + + if err := c.Bind(req); err != nil { return err } - raw, err := base64.StdEncoding.DecodeString(query.Filter) - if err != nil { + req.Paginator.Normalize() + if err := req.Filters.Unmarshal(); err != nil { return err } - var filter []models.Filter - if err := json.Unmarshal(raw, &filter); len(raw) > 0 && err != nil { + if err := c.Validate(req); err != nil { return err } - namespaces, count, err := h.service.ListNamespaces(c.Ctx(), query.Query, filter, false) + namespaces, count, 
err := h.service.ListNamespaces(c.Ctx(), req) if err != nil { return err } @@ -57,17 +54,17 @@ func (h *Handler) GetNamespaceList(c gateway.Context) error { } func (h *Handler) CreateNamespace(c gateway.Context) error { - var req requests.NamespaceCreate - if err := c.Bind(&req); err != nil { + req := new(requests.NamespaceCreate) + + if err := c.Bind(req); err != nil { return err } - var userID string - if v := c.ID(); v != nil { - userID = v.ID + if err := c.Validate(req); err != nil { + return err } - namespace, err := h.service.CreateNamespace(c.Ctx(), req, userID) + namespace, err := h.service.CreateNamespace(c.Ctx(), req) if err != nil { return err } @@ -114,22 +111,7 @@ func (h *Handler) DeleteNamespace(c gateway.Context) error { return err } - var uid string - if c.ID() != nil { - uid = c.ID().ID - } - - ns, err := h.service.GetNamespace(c.Ctx(), req.Tenant) - if err != nil || ns == nil { - return c.NoContent(http.StatusNotFound) - } - - err = guard.EvaluateNamespace(ns, uid, guard.Actions.Namespace.Delete, func() error { - err := h.service.DeleteNamespace(c.Ctx(), ns.TenantID) - - return err - }) - if err != nil { + if err := h.service.DeleteNamespace(c.Ctx(), req.Tenant); err != nil { return err } @@ -137,133 +119,96 @@ func (h *Handler) DeleteNamespace(c gateway.Context) error { } func (h *Handler) EditNamespace(c gateway.Context) error { - var req requests.NamespaceEdit - if err := c.Bind(&req); err != nil { - return err - } + req := new(requests.NamespaceEdit) - if err := c.Validate(&req); err != nil { + if err := c.Bind(req); err != nil { return err } - var uid string - if c.ID() != nil { - uid = c.ID().ID - } - - namespace, err := h.service.GetNamespace(c.Ctx(), req.Tenant) - if err != nil || namespace == nil { - return c.NoContent(http.StatusNotFound) + if err := c.Validate(req); err != nil { + return err } - var nns *models.Namespace - err = guard.EvaluateNamespace(namespace, uid, guard.Actions.Namespace.Rename, func() error { - var err error - 
nns, err = h.service.EditNamespace(c.Ctx(), namespace.TenantID, req.Name) - - return err - }) + res, err := h.service.EditNamespace(c.Ctx(), req) if err != nil { return err } - return c.JSON(http.StatusOK, nns) + return c.JSON(http.StatusOK, res) } -func (h *Handler) AddNamespaceUser(c gateway.Context) error { - var req requests.NamespaceAddUser - if err := c.Bind(&req); err != nil { - return err - } +func (h *Handler) AddNamespaceMember(c gateway.Context) error { + req := new(requests.NamespaceAddMember) - if err := c.Validate(&req); err != nil { + if err := c.Bind(req); err != nil { return err } - var uid string - if c.ID() != nil { - uid = c.ID().ID - } - - ns, err := h.service.GetNamespace(c.Ctx(), req.Tenant) - if err != nil || ns == nil { - return c.NoContent(http.StatusNotFound) + if err := c.Validate(req); err != nil { + return err } - var namespace *models.Namespace - err = guard.EvaluateNamespace(ns, uid, guard.Actions.Namespace.AddMember, func() error { - var err error - namespace, err = h.service.AddNamespaceUser(c.Ctx(), req.Username, req.Role, ns.TenantID, uid) - - return err - }) + res, err := h.service.AddNamespaceMember(c.Ctx(), req) if err != nil { return err } - return c.JSON(http.StatusOK, namespace) + return c.JSON(http.StatusOK, res) } -func (h *Handler) RemoveNamespaceUser(c gateway.Context) error { - var req requests.NamespaceRemoveUser - if err := c.Bind(&req); err != nil { - return err - } +func (h *Handler) RemoveNamespaceMember(c gateway.Context) error { + req := new(requests.NamespaceRemoveMember) - if err := c.Validate(&req); err != nil { + if err := c.Bind(req); err != nil { return err } - var uid string - if v := c.ID(); v != nil { - uid = c.ID().ID - } - - ns, err := h.service.GetNamespace(c.Ctx(), req.Tenant) - if err != nil || ns == nil { - return c.NoContent(http.StatusNotFound) + if err := c.Validate(req); err != nil { + return err } - var nns *models.Namespace - err = guard.EvaluateNamespace(ns, uid, 
guard.Actions.Namespace.RemoveMember, func() error { - var err error - nns, err = h.service.RemoveNamespaceUser(c.Ctx(), ns.TenantID, req.MemberUID, uid) - - return err - }) + res, err := h.service.RemoveNamespaceMember(c.Ctx(), req) if err != nil { return err } - return c.JSON(http.StatusOK, nns) + return c.JSON(http.StatusOK, res) } -func (h *Handler) EditNamespaceUser(c gateway.Context) error { - var req requests.NamespaceEditUser - if err := c.Bind(&req); err != nil { +func (h *Handler) LeaveNamespace(c gateway.Context) error { + req := new(requests.LeaveNamespace) + + if err := c.Bind(req); err != nil { return err } - if err := c.Validate(&req); err != nil { + if err := c.Validate(req); err != nil { return err } - var uid string - if c.ID() != nil { - uid = c.ID().ID + res, err := h.service.LeaveNamespace(c.Ctx(), req) + switch { + case err != nil: + return err + case res != nil: + return c.JSON(http.StatusOK, res) + default: + return c.NoContent(http.StatusOK) } +} - ns, err := h.service.GetNamespace(c.Ctx(), req.Tenant) - if err != nil || ns == nil { - return c.NoContent(http.StatusNotFound) - } +func (h *Handler) EditNamespaceMember(c gateway.Context) error { + req := new(requests.NamespaceUpdateMember) - err = guard.EvaluateNamespace(ns, uid, guard.Actions.Namespace.EditMember, func() error { - err := h.service.EditNamespaceUser(c.Ctx(), ns.TenantID, uid, req.MemberUID, req.Role) + if err := c.Bind(req); err != nil { + return err + } + if err := c.Validate(req); err != nil { return err - }) - if err != nil { + } + + if err := h.service.UpdateNamespaceMember(c.Ctx(), req); err != nil { return err } @@ -280,22 +225,7 @@ func (h *Handler) EditSessionRecordStatus(c gateway.Context) error { return err } - var uid string - if c.ID() != nil { - uid = c.ID().ID - } - - ns, err := h.service.GetNamespace(c.Ctx(), req.Tenant) - if err != nil || ns == nil { - return c.NoContent(http.StatusNotFound) - } - - err = guard.EvaluateNamespace(ns, uid, 
guard.Actions.Namespace.EnableSessionRecord, func() error { - err := h.service.EditSessionRecordStatus(c.Ctx(), req.SessionRecord, ns.TenantID) - - return err - }) - if err != nil { + if err := h.service.EditSessionRecordStatus(c.Ctx(), req.SessionRecord, req.Tenant); err != nil { return err } diff --git a/api/routes/nsadm_test.go b/api/routes/nsadm_test.go index 76c7305003c..e5884426d3a 100644 --- a/api/routes/nsadm_test.go +++ b/api/routes/nsadm_test.go @@ -2,17 +2,17 @@ package routes import ( "encoding/json" + "errors" "fmt" "io" "net/http" "net/http/httptest" "strings" "testing" - "time" - "github.com/shellhub-io/shellhub/api/pkg/guard" svc "github.com/shellhub-io/shellhub/api/services" "github.com/shellhub-io/shellhub/api/services/mocks" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" "github.com/shellhub-io/shellhub/pkg/api/requests" "github.com/shellhub-io/shellhub/pkg/models" "github.com/stretchr/testify/assert" @@ -28,17 +28,17 @@ func TestCreateNamespace(t *testing.T) { } cases := []struct { title string - uid string + userID string req string expected Expected requiredMocks func() }{ { - title: "fails when try to creating a namespace", - uid: "123", - req: `{ "name": "example", "tenant": "tenant"}`, + title: "fails when try to creating a namespace", + userID: "00000000-0000-4000-0000-000000000000", + req: `{ "name": "namespace", "tenant": "36512069-be88-497a-b0ec-03ed05b1f7e7"}`, requiredMocks: func() { - mock.On("CreateNamespace", gomock.Anything, gomock.AnythingOfType("requests.NamespaceCreate"), "123").Return(nil, svc.ErrNotFound).Once() + mock.On("CreateNamespace", gomock.Anything, gomock.AnythingOfType("*requests.NamespaceCreate")).Return(nil, svc.ErrNotFound).Once() }, expected: Expected{ expectedStatus: http.StatusNotFound, @@ -46,11 +46,11 @@ func TestCreateNamespace(t *testing.T) { }, }, { - title: "success when try to creating a namespace", - uid: "123", - req: `{ "name": "example", "tenant": "tenant"}`, + title: "success when try to 
creating a namespace", + userID: "123", + req: `{ "name": "namespace", "tenant": "36512069-be88-497a-b0ec-03ed05b1f7e7"}`, requiredMocks: func() { - mock.On("CreateNamespace", gomock.Anything, gomock.AnythingOfType("requests.NamespaceCreate"), "123").Return(&models.Namespace{}, nil).Once() + mock.On("CreateNamespace", gomock.Anything, gomock.AnythingOfType("*requests.NamespaceCreate")).Return(&models.Namespace{}, nil).Once() }, expected: Expected{ expectedStatus: http.StatusOK, @@ -65,8 +65,8 @@ func TestCreateNamespace(t *testing.T) { req := httptest.NewRequest(http.MethodPost, "/api/namespaces", strings.NewReader(tc.req)) req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) - req.Header.Set("X-ID", "123") + req.Header.Set("X-Role", authorizer.RoleOwner.String()) + req.Header.Set("X-ID", "00000000-0000-4000-0000-000000000000") rec := httptest.NewRecorder() e := NewRouter(mock) @@ -127,9 +127,9 @@ func TestGetNamespace(t *testing.T) { { title: "success when try to get a existing namespace", uid: "123", - req: "tenant", + req: "00000000-0000-4000-0000-000000000000", requiredMocks: func() { - mock.On("GetNamespace", gomock.Anything, "tenant").Return(&models.Namespace{}, nil) + mock.On("GetNamespace", gomock.Anything, "00000000-0000-4000-0000-000000000000").Return(&models.Namespace{}, nil) }, expected: Expected{ expectedStatus: http.StatusOK, @@ -145,7 +145,7 @@ func TestGetNamespace(t *testing.T) { req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("/api/namespaces/%s", tc.req), nil) req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) + req.Header.Set("X-Role", authorizer.RoleOwner.String()) rec := httptest.NewRecorder() e := NewRouter(mock) @@ -205,50 +205,18 @@ func TestDeleteNamespace(t *testing.T) { { title: "fails when try to deleting a existing namespace", uid: "123", - req: "tenant-id", + req: "00000000-0000-4000-0000-000000000000", requiredMocks: func() { - 
mock.On("GetNamespace", gomock.Anything, "tenant-id").Return(&models.Namespace{ - Name: "namespace-name", - Owner: "owner-name", - TenantID: "tenant-id", - Members: []models.Member{ - {ID: "123", Username: "userexemple", Role: "owner"}, - }, - Settings: &models.NamespaceSettings{}, - Devices: 10, - Sessions: 5, - MaxDevices: 100, - DevicesCount: 50, - CreatedAt: time.Now(), - Billing: &models.Billing{}, - }, nil).Once() - - mock.On("DeleteNamespace", gomock.Anything, "tenant-id").Return(svc.ErrNotFound).Once() + mock.On("DeleteNamespace", gomock.Anything, "00000000-0000-4000-0000-000000000000").Return(svc.ErrNotFound).Once() }, expectedStatus: http.StatusNotFound, }, { title: "success when try to deleting a existing namespace", uid: "123", - req: "tenant-id", + req: "00000000-0000-4000-0000-000000000000", requiredMocks: func() { - mock.On("GetNamespace", gomock.Anything, "tenant-id").Return(&models.Namespace{ - Name: "namespace-name", - Owner: "owner-name", - TenantID: "tenant-id", - Members: []models.Member{ - {ID: "123", Username: "userexemple", Role: "owner"}, - }, - Settings: &models.NamespaceSettings{}, - Devices: 10, - Sessions: 5, - MaxDevices: 100, - DevicesCount: 50, - CreatedAt: time.Now(), - Billing: &models.Billing{}, - }, nil).Once() - - mock.On("DeleteNamespace", gomock.Anything, "tenant-id").Return(nil).Once() + mock.On("DeleteNamespace", gomock.Anything, "00000000-0000-4000-0000-000000000000").Return(nil).Once() }, expectedStatus: http.StatusOK, }, @@ -260,7 +228,7 @@ func TestDeleteNamespace(t *testing.T) { req := httptest.NewRequest(http.MethodDelete, fmt.Sprintf("/api/namespaces/%s", tc.req), nil) req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) + req.Header.Set("X-Role", authorizer.RoleOwner.String()) req.Header.Set("X-ID", tc.uid) rec := httptest.NewRecorder() @@ -283,7 +251,6 @@ func TestGetSessionRecord(t *testing.T) { requiredMocks func() expectedStatus int }{ - { name: "fails when try to get a 
session record of a non-existing session", tenant: "tenant", @@ -308,7 +275,7 @@ func TestGetSessionRecord(t *testing.T) { req := httptest.NewRequest(http.MethodGet, "/api/users/security", nil) req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) + req.Header.Set("X-Role", authorizer.RoleOwner.String()) req.Header.Set("X-Tenant-ID", tc.tenant) rec := httptest.NewRecorder() @@ -323,110 +290,171 @@ func TestGetSessionRecord(t *testing.T) { } func TestEditNamespace(t *testing.T) { - mock := new(mocks.Service) + svcMock := new(mocks.Service) cases := []struct { - title string - uid string - req string - requiredMocks func() - expectedStatus int + description string + headers map[string]string + body map[string]interface{} + requiredMocks func() + expected int }{ { - title: "fails when bind fails to validate uid", - uid: "123", - req: `{"session_record": true, "tenant": ""}`, - expectedStatus: http.StatusNotFound, - requiredMocks: func() {}, - }, - { - title: "fails when validate because the tenant does not have a min of 3 characters", - uid: "123", - req: `{"session_record": true, "tenant": "id"}`, - expectedStatus: http.StatusBadRequest, - requiredMocks: func() {}, + description: "fails when role is observer", + headers: map[string]string{ + "Content-Type": "application/json", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "observer", + "X-ID": "000000000000000000000000", + }, + body: map[string]interface{}{ + "session_record": true, + }, + requiredMocks: func() { + }, + expected: http.StatusForbidden, }, { - title: "fails when validate because the tenant does not have a max of 255 characters", - uid: "123", - req: `{"session_record": true, "tenant": 
"BCD3821E12F7A6D89295D86E277F2C365D7A4C3FCCD75D8A2F46C0A556A8EBAAF0845C85D50241FC2F9806D8668FF75D262FDA0A055784AD36D8CA7D2BB600C9BCD3821E12F7A6D89295D86E277F2C365D7A4C3FCCD75D8A2F46C0A556A8EBAAF0845C85D50241FC2F9806D8668FF75D262FDA0A055784AD36D8CA7D2BB600C9"}`, - expectedStatus: http.StatusBadRequest, - requiredMocks: func() {}, + description: "fails when role is operator", + headers: map[string]string{ + "Content-Type": "application/json", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "operator", + "X-ID": "000000000000000000000000", + }, + body: map[string]interface{}{ + "session_record": true, + }, + requiredMocks: func() { + }, + expected: http.StatusForbidden, }, { - title: "fails when try to editing an non-existing namespace", - uid: "123", - req: `{"session_record": true, "tenant": "tenant-id"}`, + description: "fails when try to editing an non-existing namespace", + headers: map[string]string{ + "Content-Type": "application/json", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "owner", + "X-ID": "000000000000000000000000", + }, + body: map[string]interface{}{ + "session_record": true, + }, requiredMocks: func() { - mock.On("GetNamespace", gomock.Anything, "tenant-id").Return(&models.Namespace{ - Name: "namespace-name", - Owner: "owner-name", - TenantID: "tenant-id", - Members: []models.Member{ - {ID: "123", Username: "userexemple", Role: "owner"}, - }, - Settings: &models.NamespaceSettings{}, - Devices: 10, - Sessions: 5, - MaxDevices: 100, - DevicesCount: 50, - CreatedAt: time.Now(), - Billing: &models.Billing{}, - }, nil).Once() - - mock.On("EditSessionRecordStatus", gomock.Anything, true, "tenant-id").Return(svc.ErrNotFound).Once() + svcMock. + On("EditSessionRecordStatus", gomock.Anything, true, "00000000-0000-4000-0000-000000000000"). + Return(svc.ErrNotFound). 
+ Once() }, - expectedStatus: http.StatusNotFound, + expected: http.StatusNotFound, }, { - title: "success when try to editing an existing namespace", - uid: "123", - req: `{"session_record": true, "tenant": "tenant-id"}`, + description: "success when try to editing an existing namespace", + headers: map[string]string{ + "Content-Type": "application/json", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "owner", + "X-ID": "000000000000000000000000", + }, + body: map[string]interface{}{ + "session_record": true, + "tenant": "00000000-0000-4000-0000-000000000000", + }, requiredMocks: func() { - mock.On("GetNamespace", gomock.Anything, "tenant-id").Return(&models.Namespace{ - Name: "namespace-name", - Owner: "owner-name", - TenantID: "tenant-id", - Members: []models.Member{ - {ID: "123", Username: "userexemple", Role: "owner"}, - }, - Settings: &models.NamespaceSettings{}, - Devices: 10, - Sessions: 5, - MaxDevices: 100, - DevicesCount: 50, - CreatedAt: time.Now(), - Billing: &models.Billing{}, - }, nil).Once() - - mock.On("EditSessionRecordStatus", gomock.Anything, true, "tenant-id").Return(nil).Once() + svcMock. + On("EditSessionRecordStatus", gomock.Anything, true, "00000000-0000-4000-0000-000000000000"). + Return(nil). 
+ Once() }, - expectedStatus: http.StatusOK, + expected: http.StatusOK, }, } for _, tc := range cases { - t.Run(tc.title, func(t *testing.T) { + t.Run(tc.description, func(t *testing.T) { tc.requiredMocks() - var data requests.SessionEditRecordStatus - err := json.Unmarshal([]byte(tc.req), &data) + jsonData, err := json.Marshal(tc.body) if err != nil { assert.NoError(t, err) } - req := httptest.NewRequest(http.MethodPut, fmt.Sprintf("/api/users/security/%s", data.Tenant), strings.NewReader(tc.req)) - req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) - req.Header.Set("X-ID", tc.uid) + req := httptest.NewRequest(http.MethodPut, fmt.Sprintf("/api/users/security/%s", tc.headers["X-Tenant-ID"]), strings.NewReader(string(jsonData))) + for k, v := range tc.headers { + req.Header.Set(k, v) + } + rec := httptest.NewRecorder() - e := NewRouter(mock) + e := NewRouter(svcMock) e.ServeHTTP(rec, req) - assert.Equal(t, tc.expectedStatus, rec.Result().StatusCode) + assert.Equal(t, tc.expected, rec.Result().StatusCode) }) } - mock.AssertExpectations(t) + svcMock.AssertExpectations(t) +} + +func TestHandler_LeaveNamespace(t *testing.T) { + svcMock := new(mocks.Service) + + cases := []struct { + description string + tenantID string + headers map[string]string + requiredMocks func() + expected int + }{ + { + description: "fails to leave the namespace", + tenantID: "00000000-0000-4000-0000-000000000000", + headers: map[string]string{ + "X-ID": "000000000000000000000000", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + }, + requiredMocks: func() { + svcMock. + On("LeaveNamespace", gomock.Anything, &requests.LeaveNamespace{UserID: "000000000000000000000000", TenantID: "00000000-0000-4000-0000-000000000000", AuthenticatedTenantID: "00000000-0000-4000-0000-000000000000"}). + Return(nil, errors.New("error")). 
+ Once() + }, + expected: http.StatusInternalServerError, + }, + { + description: "success to leave the namespace", + tenantID: "00000000-0000-4000-0000-000000000000", + headers: map[string]string{ + "X-ID": "000000000000000000000000", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + }, + requiredMocks: func() { + svcMock. + On("LeaveNamespace", gomock.Anything, &requests.LeaveNamespace{UserID: "000000000000000000000000", TenantID: "00000000-0000-4000-0000-000000000000", AuthenticatedTenantID: "00000000-0000-4000-0000-000000000000"}). + Return(nil, nil). + Once() + }, + expected: http.StatusOK, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tc.requiredMocks() + + req := httptest.NewRequest(http.MethodDelete, "/api/namespaces/"+tc.tenantID+"/members", nil) + for k, v := range tc.headers { + req.Header.Set(k, v) + } + + rec := httptest.NewRecorder() + + e := NewRouter(svcMock) + e.ServeHTTP(rec, req) + + assert.Equal(tt, tc.expected, rec.Result().StatusCode) + }) + } + + svcMock.AssertExpectations(t) } diff --git a/api/routes/routes.go b/api/routes/routes.go index aee1c498af2..204eb3f75f4 100644 --- a/api/routes/routes.go +++ b/api/routes/routes.go @@ -1,108 +1,200 @@ package routes import ( + "net/http" + + "github.com/getsentry/sentry-go" + "github.com/labstack/echo-contrib/echoprometheus" "github.com/labstack/echo/v4" + echoMiddleware "github.com/labstack/echo/v4/middleware" "github.com/shellhub-io/shellhub/api/pkg/echo/handlers" "github.com/shellhub-io/shellhub/api/pkg/gateway" - apiMiddleware "github.com/shellhub-io/shellhub/api/routes/middleware" + routesmiddleware "github.com/shellhub-io/shellhub/api/routes/middleware" "github.com/shellhub-io/shellhub/api/services" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" + "github.com/shellhub-io/shellhub/pkg/envs" + pkgmiddleware "github.com/shellhub-io/shellhub/pkg/middleware" + "github.com/shellhub-io/shellhub/pkg/websocket" + "github.com/sirupsen/logrus" ) 
-func NewRouter(service services.Service) *echo.Echo { - e := echo.New() - e.Binder = handlers.NewBinder() - e.Validator = handlers.NewValidator() - e.HTTPErrorHandler = handlers.NewErrors(nil) +type DefaultHTTPHandlerConfig struct { + // Reporter represents an instance of [*sentry.Client] that should be proper configured to send error messages + // from the error handler. If it's nil, the error handler will ignore the Sentry client. + Reporter *sentry.Client +} - e.Use(func(next echo.HandlerFunc) echo.HandlerFunc { - return func(c echo.Context) error { - apicontext := gateway.NewContext(service, c) +// DefaultHTTPHandler creates an HTTP handler, using [github.com/labstack/echo/v4] package, with the default +// configuration required by ShellHub's services, loading the [github.com/shellhub-io/shellhub/api/pkg/gateway] into +// the context, and the service layer. The configuration received controls the error reporter and more. +func DefaultHTTPHandler[S any](service S, cfg *DefaultHTTPHandlerConfig) http.Handler { + server := echo.New() + + // Sets the default binder. + server.Binder = handlers.NewBinder() + + // Sets the default validator. + server.Validator = handlers.NewValidator() + + // Defines the default errors handler. + server.HTTPErrorHandler = handlers.NewErrors(cfg.Reporter) - return next(apicontext) + // Configures the default IP extractor for a header. + server.IPExtractor = echo.ExtractIPFromRealIPHeader() + + // NOTE: Instantiates a new logger instance to be used by the logger's middleware. + server.Logger = pkgmiddleware.NewEchoLogger(logrus.NewEntry(logrus.StandardLogger())) + + server.Use(echoMiddleware.RequestID()) + server.Use(echoMiddleware.Secure()) + server.Use(func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + // NOTE: We load the gateway context to each route handler to access their context as gateway's context. 
+ // https://echo.labstack.com/docs/context + return next(gateway.NewContext(service, c)) } }) + server.Use(pkgmiddleware.Log) + + return server +} + +type Option func(e *echo.Echo, handler *Handler) error + +func WithReporter(reporter *sentry.Client) Option { + return func(e *echo.Echo, _ *Handler) error { + e.HTTPErrorHandler = handlers.NewErrors(reporter) + + return nil + } +} + +func WithMetrics() Option { + return func(e *echo.Echo, _ *Handler) error { + e.Use(echoprometheus.NewMiddleware("api")) + e.GET("/metrics", echoprometheus.NewHandler()) + + return nil + } +} + +func WithOpenAPIValidator(cfg *routesmiddleware.OpenAPIValidatorConfig) Option { + return func(e *echo.Echo, _ *Handler) error { + e.Use(routesmiddleware.OpenAPIValidator(cfg)) + + return nil + } +} - handler := NewHandler(service) +func NewRouter(service services.Service, opts ...Option) *echo.Echo { + router := DefaultHTTPHandler(service, new(DefaultHTTPHandlerConfig)).(*echo.Echo) + + handler := NewHandler(service, websocket.NewGorillaWebSocketUpgrader()) + for _, opt := range opts { + if err := opt(router, handler); err != nil { + return nil + } + } // Internal routes only accessible by other services in the local container network - internalAPI := e.Group("/internal") + internalAPI := router.Group("/internal") - internalAPI.GET(AuthRequestURL, gateway.Handler(handler.AuthRequest), gateway.Middleware(AuthMiddleware)) - internalAPI.GET(AuthUserTokenInternalURL, gateway.Handler(handler.AuthGetToken)) + internalAPI.GET(AuthRequestURL, gateway.Handler(handler.AuthRequest)) + internalAPI.GET(AuthUserTokenInternalURL, gateway.Handler(handler.CreateUserToken)) // TODO: same as defined in public API. remove it. 
- internalAPI.GET(GetDeviceByPublicURLAddress, gateway.Handler(handler.GetDeviceByPublicURLAddress)) internalAPI.POST(OfflineDeviceURL, gateway.Handler(handler.OfflineDevice)) - internalAPI.POST(HeartbeatDeviceURL, gateway.Handler(handler.HeartbeatDevice)) internalAPI.GET(LookupDeviceURL, gateway.Handler(handler.LookupDevice)) - internalAPI.PATCH(SetSessionAuthenticatedURL, gateway.Handler(handler.SetSessionAuthenticated)) internalAPI.POST(CreateSessionURL, gateway.Handler(handler.CreateSession)) internalAPI.POST(FinishSessionURL, gateway.Handler(handler.FinishSession)) internalAPI.POST(KeepAliveSessionURL, gateway.Handler(handler.KeepAliveSession)) - internalAPI.POST(RecordSessionURL, gateway.Handler(handler.RecordSession)) + internalAPI.PATCH(UpdateSessionURL, gateway.Handler(handler.UpdateSession)) internalAPI.GET(GetPublicKeyURL, gateway.Handler(handler.GetPublicKey)) internalAPI.POST(CreatePrivateKeyURL, gateway.Handler(handler.CreatePrivateKey)) internalAPI.POST(EvaluateKeyURL, gateway.Handler(handler.EvaluateKey)) + internalAPI.GET(EventsSessionsURL, gateway.Handler(handler.EventSession)) // Public routes for external access through API gateway - publicAPI := e.Group("/api") + publicAPI := router.Group("/api") + publicAPI.GET(HealthCheckURL, gateway.Handler(handler.EvaluateHealth)) + publicAPI.GET(AuthLocalUserURLV2, gateway.Handler(handler.CreateUserToken)) // TODO: method POST + publicAPI.GET(AuthUserTokenPublicURL, gateway.Handler(handler.CreateUserToken), routesmiddleware.BlockAPIKey) // TODO: method POST publicAPI.POST(AuthDeviceURL, gateway.Handler(handler.AuthDevice)) publicAPI.POST(AuthDeviceURLV2, gateway.Handler(handler.AuthDevice)) - publicAPI.POST(AuthUserURL, gateway.Handler(handler.AuthUser)) - publicAPI.POST(AuthUserURLV2, gateway.Handler(handler.AuthUser)) - publicAPI.GET(AuthUserURLV2, gateway.Handler(handler.AuthUserInfo)) + publicAPI.POST(AuthLocalUserURL, gateway.Handler(handler.AuthLocalUser)) + publicAPI.POST(AuthLocalUserURLV2, 
gateway.Handler(handler.AuthLocalUser)) publicAPI.POST(AuthPublicKeyURL, gateway.Handler(handler.AuthPublicKey)) - publicAPI.GET(AuthUserTokenPublicURL, gateway.Handler(handler.AuthSwapToken)) - - publicAPI.PATCH(UpdateUserDataURL, gateway.Handler(handler.UpdateUserData)) - publicAPI.PATCH(UpdateUserPasswordURL, gateway.Handler(handler.UpdateUserPassword)) - publicAPI.PUT(EditSessionRecordStatusURL, gateway.Handler(handler.EditSessionRecordStatus)) - publicAPI.GET(GetSessionRecordURL, gateway.Handler(handler.GetSessionRecord)) - publicAPI.GET(GetDeviceListURL, apiMiddleware.Authorize(gateway.Handler(handler.GetDeviceList))) - publicAPI.GET(GetDeviceURL, apiMiddleware.Authorize(gateway.Handler(handler.GetDevice))) - publicAPI.DELETE(DeleteDeviceURL, gateway.Handler(handler.DeleteDevice)) - publicAPI.PUT(UpdateDevice, gateway.Handler(handler.UpdateDevice)) - publicAPI.PATCH(RenameDeviceURL, gateway.Handler(handler.RenameDevice)) - publicAPI.PATCH(UpdateDeviceStatusURL, gateway.Handler(handler.UpdateDeviceStatus)) - - publicAPI.POST(CreateTagURL, gateway.Handler(handler.CreateDeviceTag)) - publicAPI.DELETE(RemoveTagURL, gateway.Handler(handler.RemoveDeviceTag)) - publicAPI.PUT(UpdateTagURL, gateway.Handler(handler.UpdateDeviceTag)) - - publicAPI.GET(GetTagsURL, gateway.Handler(handler.GetTags)) - publicAPI.PUT(RenameTagURL, gateway.Handler(handler.RenameTag)) - publicAPI.DELETE(DeleteTagsURL, gateway.Handler(handler.DeleteTag)) - - publicAPI.GET(GetSessionsURL, apiMiddleware.Authorize(gateway.Handler(handler.GetSessionList))) - publicAPI.GET(GetSessionURL, apiMiddleware.Authorize(gateway.Handler(handler.GetSession))) - publicAPI.GET(PlaySessionURL, gateway.Handler(handler.PlaySession)) - publicAPI.DELETE(RecordSessionURL, gateway.Handler(handler.DeleteRecordedSession)) - - publicAPI.GET(GetStatsURL, apiMiddleware.Authorize(gateway.Handler(handler.GetStats))) + publicAPI.POST(CreateAPIKeyURL, gateway.Handler(handler.CreateAPIKey), routesmiddleware.BlockAPIKey, 
routesmiddleware.RequiresPermission(authorizer.APIKeyCreate)) + publicAPI.GET(ListAPIKeysURL, gateway.Handler(handler.ListAPIKeys)) + publicAPI.PATCH(UpdateAPIKeyURL, gateway.Handler(handler.UpdateAPIKey), routesmiddleware.BlockAPIKey, routesmiddleware.RequiresPermission(authorizer.APIKeyUpdate)) + publicAPI.DELETE(DeleteAPIKeyURL, gateway.Handler(handler.DeleteAPIKey), routesmiddleware.BlockAPIKey, routesmiddleware.RequiresPermission(authorizer.APIKeyDelete)) + + publicAPI.PATCH(URLUpdateUser, gateway.Handler(handler.UpdateUser), routesmiddleware.BlockAPIKey) + publicAPI.PATCH(URLDeprecatedUpdateUser, gateway.Handler(handler.UpdateUser), routesmiddleware.BlockAPIKey) // WARN: DEPRECATED. + publicAPI.PATCH(URLDeprecatedUpdateUserPassword, gateway.Handler(handler.UpdateUserPassword), routesmiddleware.BlockAPIKey) // WARN: DEPRECATED. + + publicAPI.GET(GetDeviceListURL, routesmiddleware.Authorize(gateway.Handler(handler.GetDeviceList))) + publicAPI.GET(GetDeviceURL, routesmiddleware.Authorize(gateway.Handler(handler.GetDevice))) + publicAPI.GET(ResolveDeviceURL, routesmiddleware.Authorize(gateway.Handler(handler.ResolveDevice))) + publicAPI.PUT(UpdateDevice, gateway.Handler(handler.UpdateDevice), routesmiddleware.RequiresPermission(authorizer.DeviceUpdate)) + publicAPI.PATCH(RenameDeviceURL, gateway.Handler(handler.RenameDevice), routesmiddleware.RequiresPermission(authorizer.DeviceRename)) + publicAPI.PATCH(UpdateDeviceStatusURL, gateway.Handler(handler.UpdateDeviceStatus), routesmiddleware.RequiresPermission(authorizer.DeviceAccept)) // TODO: DeviceWrite + publicAPI.DELETE(DeleteDeviceURL, gateway.Handler(handler.DeleteDevice), routesmiddleware.RequiresPermission(authorizer.DeviceRemove)) + + publicAPI.GET(URLGetTags, gateway.Handler(handler.GetTags)) + publicAPI.POST(URLCreateTag, gateway.Handler(handler.CreateTag), routesmiddleware.RequiresPermission(authorizer.TagCreate)) + publicAPI.PATCH(URLUpdateTag, gateway.Handler(handler.UpdateTag), 
routesmiddleware.RequiresPermission(authorizer.TagUpdate)) + publicAPI.DELETE(URLDeleteTag, gateway.Handler(handler.DeleteTag), routesmiddleware.RequiresPermission(authorizer.TagDelete)) + publicAPI.POST(URLPushTagToDevice, gateway.Handler(handler.PushTagToDevice), routesmiddleware.RequiresPermission(authorizer.TagCreate)) + publicAPI.DELETE(URLPullTagFromDevice, gateway.Handler(handler.PullTagFromDevice), routesmiddleware.RequiresPermission(authorizer.TagDelete)) + + // NOTE: Legacy tag routes with tenant in path for backward compatibility. + publicAPI.GET(URLOldGetTags, gateway.Handler(handler.GetTags)) + publicAPI.POST(URLOldCreateTag, gateway.Handler(handler.CreateTag), routesmiddleware.RequiresPermission(authorizer.TagCreate)) + publicAPI.PATCH(URLOldUpdateTag, gateway.Handler(handler.UpdateTag), routesmiddleware.RequiresPermission(authorizer.TagUpdate)) + publicAPI.DELETE(URLOldDeleteTag, gateway.Handler(handler.DeleteTag), routesmiddleware.RequiresPermission(authorizer.TagDelete)) + publicAPI.POST(URLOldPushTagToDevice, gateway.Handler(handler.PushTagToDevice), routesmiddleware.RequiresPermission(authorizer.TagCreate)) + publicAPI.DELETE(URLOldPullTagFromDevice, gateway.Handler(handler.PullTagFromDevice), routesmiddleware.RequiresPermission(authorizer.TagDelete)) + + publicAPI.GET(GetSessionsURL, routesmiddleware.Authorize(gateway.Handler(handler.GetSessionList))) + publicAPI.GET(GetSessionURL, routesmiddleware.Authorize(gateway.Handler(handler.GetSession))) + + publicAPI.GET(GetStatsURL, routesmiddleware.Authorize(gateway.Handler(handler.GetStats))) publicAPI.GET(GetSystemInfoURL, gateway.Handler(handler.GetSystemInfo)) publicAPI.GET(GetSystemDownloadInstallScriptURL, gateway.Handler(handler.GetSystemDownloadInstallScript)) + publicAPI.POST(CreatePublicKeyURL, gateway.Handler(handler.CreatePublicKey), routesmiddleware.RequiresPermission(authorizer.PublicKeyCreate)) publicAPI.GET(GetPublicKeysURL, gateway.Handler(handler.GetPublicKeys)) - 
publicAPI.POST(CreatePublicKeyURL, gateway.Handler(handler.CreatePublicKey)) - publicAPI.PUT(UpdatePublicKeyURL, gateway.Handler(handler.UpdatePublicKey)) - publicAPI.DELETE(DeletePublicKeyURL, gateway.Handler(handler.DeletePublicKey)) - - publicAPI.POST(AddPublicKeyTagURL, gateway.Handler(handler.AddPublicKeyTag)) - publicAPI.DELETE(RemovePublicKeyTagURL, gateway.Handler(handler.RemovePublicKeyTag)) - publicAPI.PUT(UpdatePublicKeyTagsURL, gateway.Handler(handler.UpdatePublicKeyTags)) + publicAPI.PUT(UpdatePublicKeyURL, gateway.Handler(handler.UpdatePublicKey), routesmiddleware.RequiresPermission(authorizer.PublicKeyEdit)) + publicAPI.DELETE(DeletePublicKeyURL, gateway.Handler(handler.DeletePublicKey), routesmiddleware.RequiresPermission(authorizer.PublicKeyRemove)) - publicAPI.GET(ListNamespaceURL, gateway.Handler(handler.GetNamespaceList)) - publicAPI.GET(GetNamespaceURL, gateway.Handler(handler.GetNamespace)) publicAPI.POST(CreateNamespaceURL, gateway.Handler(handler.CreateNamespace)) - publicAPI.DELETE(DeleteNamespaceURL, gateway.Handler(handler.DeleteNamespace)) - publicAPI.PUT(EditNamespaceURL, gateway.Handler(handler.EditNamespace)) - publicAPI.POST(AddNamespaceUserURL, gateway.Handler(handler.AddNamespaceUser)) - publicAPI.DELETE(RemoveNamespaceUserURL, gateway.Handler(handler.RemoveNamespaceUser)) - publicAPI.PATCH(EditNamespaceUserURL, gateway.Handler(handler.EditNamespaceUser)) - publicAPI.GET(HealthCheckURL, gateway.Handler(handler.EvaluateHealth)) + publicAPI.GET(GetNamespaceURL, gateway.Handler(handler.GetNamespace)) + publicAPI.GET(ListNamespaceURL, gateway.Handler(handler.GetNamespaceList)) + publicAPI.PUT(EditNamespaceURL, gateway.Handler(handler.EditNamespace), routesmiddleware.RequiresPermission(authorizer.NamespaceUpdate)) + publicAPI.DELETE(DeleteNamespaceURL, gateway.Handler(handler.DeleteNamespace), routesmiddleware.RequiresPermission(authorizer.NamespaceDelete)) + + publicAPI.POST(AddNamespaceMemberURL, 
gateway.Handler(handler.AddNamespaceMember), routesmiddleware.RequiresPermission(authorizer.NamespaceAddMember)) + publicAPI.PATCH(EditNamespaceMemberURL, gateway.Handler(handler.EditNamespaceMember), routesmiddleware.RequiresPermission(authorizer.NamespaceEditMember)) + publicAPI.DELETE(RemoveNamespaceMemberURL, gateway.Handler(handler.RemoveNamespaceMember), routesmiddleware.RequiresPermission(authorizer.NamespaceRemoveMember)) + publicAPI.DELETE(LeaveNamespaceURL, gateway.Handler(handler.LeaveNamespace)) + + publicAPI.GET(GetSessionRecordURL, gateway.Handler(handler.GetSessionRecord)) + publicAPI.PUT(EditSessionRecordStatusURL, gateway.Handler(handler.EditSessionRecordStatus), routesmiddleware.RequiresPermission(authorizer.NamespaceEnableSessionRecord)) + + if !envs.IsCloud() { + publicAPI.POST(SetupEndpoint, gateway.Handler(handler.Setup)) + } + + // NOTE: Rewrite requests to containers to devices, as they are the same thing under the hood, using it as an alias. + router.Pre(echoMiddleware.Rewrite(map[string]string{ + "/api/containers": "/api/devices?connector=true", + "/api/containers?*": "/api/devices?$1&connector=true", + "/api/containers/*": "/api/devices/$1", + })) - return e + return router } diff --git a/api/routes/session.go b/api/routes/session.go index 7db991967a7..b17586fbd1e 100644 --- a/api/routes/session.go +++ b/api/routes/session.go @@ -5,20 +5,20 @@ import ( "strconv" "github.com/shellhub-io/shellhub/api/pkg/gateway" - "github.com/shellhub-io/shellhub/pkg/api/paginator" "github.com/shellhub-io/shellhub/pkg/api/requests" "github.com/shellhub-io/shellhub/pkg/models" + "github.com/shellhub-io/shellhub/pkg/websocket" + log "github.com/sirupsen/logrus" ) const ( - GetSessionsURL = "/sessions" - GetSessionURL = "/sessions/:uid" - SetSessionAuthenticatedURL = "/sessions/:uid" - CreateSessionURL = "/sessions" - FinishSessionURL = "/sessions/:uid/finish" - KeepAliveSessionURL = "/sessions/:uid/keepalive" - RecordSessionURL = "/sessions/:uid/record" - 
PlaySessionURL = "/sessions/:uid/play" + GetSessionsURL = "/sessions" + GetSessionURL = "/sessions/:uid" + UpdateSessionURL = "/sessions/:uid" + CreateSessionURL = "/sessions" + FinishSessionURL = "/sessions/:uid/finish" + KeepAliveSessionURL = "/sessions/:uid/keepalive" + EventsSessionsURL = "/sessions/:uid/events" ) const ( @@ -26,15 +26,20 @@ const ( ) func (h *Handler) GetSessionList(c gateway.Context) error { - query := paginator.NewQuery() - if err := c.Bind(query); err != nil { + req := new(requests.ListSessions) + + if err := c.Bind(req); err != nil { + return err + } + + if err := c.Validate(req); err != nil { return err } // TODO: normalize is not required when request is privileged - query.Normalize() + req.Paginator.Normalize() - sessions, count, err := h.service.ListSessions(c.Ctx(), *query) + sessions, count, err := h.service.ListSessions(c.Ctx(), req) if err != nil { return err } @@ -62,8 +67,8 @@ func (h *Handler) GetSession(c gateway.Context) error { return c.JSON(http.StatusOK, session) } -func (h *Handler) SetSessionAuthenticated(c gateway.Context) error { - var req requests.SessionAuthenticatedSet +func (h *Handler) UpdateSession(c gateway.Context) error { + var req requests.SessionUpdate if err := c.Bind(&req); err != nil { return err } @@ -72,7 +77,11 @@ func (h *Handler) SetSessionAuthenticated(c gateway.Context) error { return err } - return h.service.SetSessionAuthenticated(c.Ctx(), models.UID(req.UID), req.Authenticated) + return h.service.UpdateSession(c.Ctx(), models.UID(req.UID), models.SessionUpdate{ + Recorded: req.Recorded, + Authenticated: req.Authenticated, + Type: req.Type, + }) } func (h *Handler) CreateSession(c gateway.Context) error { @@ -119,14 +128,54 @@ func (h *Handler) KeepAliveSession(c gateway.Context) error { return h.service.KeepAliveSession(c.Ctx(), models.UID(req.UID)) } -func (h *Handler) RecordSession(c gateway.Context) error { - return c.NoContent(http.StatusOK) -} +func (h *Handler) EventSession(c 
gateway.Context) error { + var req requests.SessionIDParam -func (h *Handler) PlaySession(c gateway.Context) error { - return c.NoContent(http.StatusOK) -} + if err := c.Bind(&req); err != nil { + return err + } -func (h *Handler) DeleteRecordedSession(c gateway.Context) error { - return c.NoContent(http.StatusOK) + if err := c.Validate(&req); err != nil { + return err + } + + if !c.IsWebSocket() { + return c.NoContent(http.StatusBadRequest) + } + + connection, err := h.WebSocketUpgrader.Upgrade(c.Response(), c.Request()) + if err != nil { + return c.NoContent(http.StatusBadRequest) + } + + defer connection.Close() + + var r requests.SessionEvent + for { + if err := connection.ReadJSON(&r); err != nil { + if websocket.IsErrorCloseNormal(err) || websocket.IsUnexpectedCloseError(err) { + log.WithError(err).WithFields(log.Fields{ + "uid": req.UID, + }).Debug("events websocket closed with a ignored error") + + return nil + } + + return err + } + + if err := c.Validate(&r); err != nil { + return err + } + + if err := h.service.EventSession(c.Ctx(), models.UID(req.UID), &models.SessionEvent{ + Session: req.UID, + Type: models.SessionEventType(r.Type), + Timestamp: r.Timestamp, + Data: r.Data, + Seat: r.Seat, + }); err != nil { + return err + } + } } diff --git a/api/routes/session_test.go b/api/routes/session_test.go index f18d17f128a..f8092d7d4bc 100644 --- a/api/routes/session_test.go +++ b/api/routes/session_test.go @@ -2,6 +2,7 @@ package routes import ( "encoding/json" + "errors" "fmt" "io" "net/http" @@ -9,15 +10,17 @@ import ( "strings" "testing" + "github.com/gorilla/websocket" + "github.com/labstack/echo/v4" + "github.com/shellhub-io/shellhub/api/pkg/gateway" svc "github.com/shellhub-io/shellhub/api/services" - - "github.com/shellhub-io/shellhub/api/store" - - "github.com/shellhub-io/shellhub/api/pkg/guard" "github.com/shellhub-io/shellhub/api/services/mocks" - "github.com/shellhub-io/shellhub/pkg/api/paginator" + "github.com/shellhub-io/shellhub/api/store" + 
"github.com/shellhub-io/shellhub/pkg/api/authorizer" + "github.com/shellhub-io/shellhub/pkg/api/query" "github.com/shellhub-io/shellhub/pkg/api/requests" "github.com/shellhub-io/shellhub/pkg/models" + websocketmocks "github.com/shellhub-io/shellhub/pkg/websocket/mocks" "github.com/stretchr/testify/assert" gomock "github.com/stretchr/testify/mock" ) @@ -30,19 +33,21 @@ func TestGetSessionList(t *testing.T) { expectedStatus int } cases := []struct { - title string - payload paginator.Query - requiredMocks func(payload *paginator.Query) + description string + paginator query.Paginator + headers map[string]string + requiredMocks func() expected Expected }{ { - title: "fails when try to searching a session list of a existing session", - payload: paginator.Query{ - Page: 1, - PerPage: 10, - }, - requiredMocks: func(payload *paginator.Query) { - mock.On("ListSessions", gomock.Anything, *payload).Return(nil, 0, svc.ErrNotFound).Once() + description: "fails when try to searching a session list of a existing session", + paginator: query.Paginator{Page: 1, PerPage: 10}, + headers: map[string]string{"X-Tenant-ID": "00000000-0000-4000-0000-000000000000"}, + requiredMocks: func() { + mock. + On("ListSessions", gomock.Anything, &requests.ListSessions{Paginator: query.Paginator{Page: 1, PerPage: 10}, TenantID: "00000000-0000-4000-0000-000000000000"}). + Return(nil, 0, svc.ErrNotFound). 
+ Once() }, expected: Expected{ expectedSession: nil, @@ -50,14 +55,14 @@ func TestGetSessionList(t *testing.T) { }, }, { - title: "success when try to searching a session list of a existing session", - payload: paginator.Query{ - Page: 1, - PerPage: 10, - }, - requiredMocks: func(payload *paginator.Query) { - ss := []models.Session{} - mock.On("ListSessions", gomock.Anything, *payload).Return(ss, 1, nil).Once() + description: "success when try to searching a session list of a existing session", + paginator: query.Paginator{Page: 1, PerPage: 10}, + headers: map[string]string{"X-Tenant-ID": "00000000-0000-4000-0000-000000000000"}, + requiredMocks: func() { + mock. + On("ListSessions", gomock.Anything, &requests.ListSessions{Paginator: query.Paginator{Page: 1, PerPage: 10}, TenantID: "00000000-0000-4000-0000-000000000000"}). + Return([]models.Session{}, 1, nil). + Once() }, expected: Expected{ expectedSession: []models.Session{}, @@ -67,17 +72,21 @@ func TestGetSessionList(t *testing.T) { } for _, tc := range cases { - t.Run(tc.title, func(t *testing.T) { - tc.requiredMocks(&tc.payload) + t.Run(tc.description, func(t *testing.T) { + tc.requiredMocks() - jsonData, err := json.Marshal(tc.payload) + jsonData, err := json.Marshal(tc.paginator) if err != nil { assert.NoError(t, err) } req := httptest.NewRequest(http.MethodGet, "/api/sessions", strings.NewReader(string(jsonData))) req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) + req.Header.Set("X-Role", authorizer.RoleOwner.String()) + for k, v := range tc.headers { + req.Header.Set(k, v) + } + rec := httptest.NewRecorder() e := NewRouter(mock) @@ -148,7 +157,7 @@ func TestGetSession(t *testing.T) { req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("/api/sessions/%s", tc.uid), nil) req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) + req.Header.Set("X-Role", authorizer.RoleOwner.String()) rec := httptest.NewRecorder() e := 
NewRouter(mock) @@ -248,7 +257,7 @@ func TestCreateSession(t *testing.T) { req := httptest.NewRequest(http.MethodPost, "/internal/sessions", strings.NewReader(string(jsonData))) req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) + req.Header.Set("X-Role", authorizer.RoleOwner.String()) rec := httptest.NewRecorder() e := NewRouter(mock) @@ -300,7 +309,7 @@ func TestFinishSession(t *testing.T) { req := httptest.NewRequest(http.MethodPost, fmt.Sprintf("/internal/sessions/%s/finish", tc.uid), nil) req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) + req.Header.Set("X-Role", authorizer.RoleOwner.String()) rec := httptest.NewRecorder() e := NewRouter(mock) @@ -312,3 +321,156 @@ func TestFinishSession(t *testing.T) { mock.AssertExpectations(t) } + +func TestEventSession(t *testing.T) { + mock := new(mocks.Service) + webSocketUpgraderMock := new(websocketmocks.Upgrader) + + cases := []struct { + description string + uid string + seat int + requiredMocks func(uid string) + expected int + }{ + { + description: "fails when upgrade cannot be done", + uid: "123", + seat: 0, + requiredMocks: func(_ string) { + webSocketUpgraderMock.On("Upgrade", gomock.Anything, gomock.Anything).Return(nil, errors.New("")).Once() + }, + expected: http.StatusBadRequest, + }, + { + description: "fails when cannot read from websocket due error", + uid: "123", + seat: 0, + requiredMocks: func(_ string) { + conn := new(websocketmocks.Conn) + conn.On("Close").Return(nil).Once() + conn.On("ReadJSON", gomock.Anything).Return(io.EOF).Once() + + webSocketUpgraderMock.On("Upgrade", gomock.Anything, gomock.Anything).Return(conn, nil).Once() + }, + expected: http.StatusInternalServerError, + }, + { + description: "fails when cannot read from websocket due generic error", + uid: "123", + seat: 0, + requiredMocks: func(_ string) { + conn := new(websocketmocks.Conn) + conn.On("Close").Return(nil).Once() + conn.On("ReadJSON", 
gomock.Anything).Return(errors.New("")).Once() + + webSocketUpgraderMock.On("Upgrade", gomock.Anything, gomock.Anything).Return(conn, nil).Once() + }, + expected: http.StatusInternalServerError, + }, + { + description: "fails when record frame is invalid", + uid: "123", + seat: 0, + requiredMocks: func(_ string) { + conn := new(websocketmocks.Conn) + conn.On("Close").Return(nil).Once() + conn.On("ReadJSON", gomock.Anything).Return(nil).Once().Run(func(args gomock.Arguments) { + req := args.Get(0).(*requests.SessionEvent) //nolint:forcetypeassert + + json. + NewDecoder(strings.NewReader(`{}`)). + Decode(req) //nolint:errcheck + }) + + webSocketUpgraderMock.On("Upgrade", gomock.Anything, gomock.Anything).Return(conn, nil).Once() + }, + expected: http.StatusBadRequest, + }, + { + description: "fails to write the frame on the database", + uid: "123", + seat: 0, + requiredMocks: func(uid string) { + conn := new(websocketmocks.Conn) + conn.On("Close").Return(nil).Once() + conn.On("NextReader").Return().Once() + conn.On("ReadJSON", gomock.Anything).Return(nil).Once().Run(func(args gomock.Arguments) { + req := args.Get(0).(*requests.SessionEvent) //nolint:forcetypeassert + + json. + NewDecoder(strings.NewReader(`{"type":"pty-output","timestamp":"2025-02-03T14:11:32.405Z","data": { "output":"test" },"seat": 0}`)). + Decode(req) //nolint:errcheck + }) + + webSocketUpgraderMock.On("Upgrade", gomock.Anything, gomock.Anything).Return(conn, nil).Once() + + mock.On("EventSession", gomock.Anything, models.UID(uid), gomock.Anything). 
+ Return(errors.New("not able record")).Once() + }, + expected: http.StatusInternalServerError, + }, + { + description: "success to write one frame on database", + uid: "123", + seat: 0, + requiredMocks: func(uid string) { + conn := new(websocketmocks.Conn) + conn.On("Close").Return(nil).Once() + conn.On("NextReader").Return().Once() + conn.On("ReadJSON", gomock.Anything).Return(nil).Once().Run(func(args gomock.Arguments) { + req := args.Get(0).(*requests.SessionEvent) //nolint:forcetypeassert + + json. + NewDecoder(strings.NewReader(`{"type":"pty-output","timestamp":"2025-02-03T14:11:32.405Z","data": { "output":"test" },"seat": 0}`)). + Decode(req) //nolint:errcheck + }) + + webSocketUpgraderMock.On("Upgrade", gomock.Anything, gomock.Anything).Return(conn, nil).Once() + + mock.On("EventSession", gomock.Anything, models.UID(uid), + gomock.Anything).Return(nil).Once() + + conn.On("ReadJSON", gomock.Anything).Return(&websocket.CloseError{ + Code: 1000, + Text: "test", + }).Once() + }, + expected: http.StatusOK, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + tc.requiredMocks(tc.uid) + + req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("ws:///internal/sessions/%s/events", tc.uid), nil) + req.Header.Set("Content-Type", echo.MIMEApplicationJSON) + req.Header.Set("X-Role", authorizer.RoleOwner.String()) + req.Header.Set("Upgrade", "websocket") + req.Header.Set("Connection", "Upgrade") + req.Header.Set("Sec-WebSocket-Version", "13") + req.Header.Set("Sec-WebSocket-Key", "test") + + e := NewRouter(mock, func(_ *echo.Echo, handler *Handler) error { + handler.WebSocketUpgrader = webSocketUpgraderMock + + return nil + }) + + e.Use(func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + ctx := gateway.NewContext(mock, c) + + return next(ctx) + } + }) + + rec := httptest.NewRecorder() + e.ServeHTTP(rec, req) + + assert.Equal(t, tc.expected, rec.Result().StatusCode) + mock.AssertExpectations(t) + }) + } 
+} diff --git a/api/routes/setup.go b/api/routes/setup.go index b625af709dc..35da1f73cb6 100644 --- a/api/routes/setup.go +++ b/api/routes/setup.go @@ -7,9 +7,17 @@ import ( "github.com/shellhub-io/shellhub/pkg/api/requests" ) -const SetupEndpoint = "/setup" +const ( + SetupEndpoint = "/setup" + SetupSignQuery = "sign" +) func (h *Handler) Setup(c gateway.Context) error { + sign := c.QueryParam(SetupSignQuery) + if sign == "" { + return c.NoContent(http.StatusBadRequest) + } + var req requests.Setup if err := c.Bind(&req); err != nil { return err @@ -19,6 +27,10 @@ func (h *Handler) Setup(c gateway.Context) error { return err } + if err := h.service.SetupVerify(c.Ctx(), sign); err != nil { + return err + } + if err := h.service.Setup(c.Ctx(), req); err != nil { return err } diff --git a/api/routes/setup_test.go b/api/routes/setup_test.go new file mode 100644 index 00000000000..fcfe2cc08cf --- /dev/null +++ b/api/routes/setup_test.go @@ -0,0 +1,135 @@ +package routes + +import ( + "errors" + "fmt" + "net/http" + "net/http/httptest" + "strings" + "testing" + + serviceMocks "github.com/shellhub-io/shellhub/api/services/mocks" + "github.com/shellhub-io/shellhub/pkg/api/requests" + "github.com/shellhub-io/shellhub/pkg/envs" + envMocks "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +func TestSetup(t *testing.T) { + envMock := new(envMocks.Backend) + envs.DefaultBackend = envMock + + envMock.On("Get", "SHELLHUB_CLOUD").Return("false") + + servicesMock := new(serviceMocks.Service) + + tests := []struct { + description string + queries string + body string + requiredMocks func() + expected int + }{ + { + description: "fail to get the signature", + body: "", + requiredMocks: func() {}, + expected: http.StatusBadRequest, + }, + { + description: "fail to parse the json body", + queries: "?sign=value", + body: "", + requiredMocks: func() {}, + expected: http.StatusBadRequest, + }, + { + 
description: "fail to valid the json body", + queries: "?sign=value", + body: `{ + "name": "John Doe", + "username": "john.doe", + "email": "john.doe", + "password": "123" + }`, + requiredMocks: func() {}, + expected: http.StatusBadRequest, + }, + { + description: "fail to validate the signature", + queries: "?sign=value", + body: `{ + "name": "John Doe", + "username": "john.doe", + "email": "john.doe@example.com", + "password": "password" + }`, + requiredMocks: func() { + servicesMock.On("SetupVerify", mock.Anything, "value").Return(errors.New("")).Once() + }, + expected: http.StatusInternalServerError, + }, + { + description: "fail to setup on service", + queries: "?sign=value", + body: `{ + "name": "John Doe", + "username": "john.doe", + "email": "john.doe@example.com", + "password": "password" + }`, + requiredMocks: func() { + servicesMock.On("SetupVerify", mock.Anything, "value").Return(nil).Once() + + servicesMock.On("Setup", mock.Anything, requests.Setup{ + Name: "John Doe", + Username: "john.doe", + Email: "john.doe@example.com", + Password: "password", + }).Return(errors.New("")).Once() + }, + expected: http.StatusInternalServerError, + }, + { + description: "success to setup on service", + queries: "?sign=value", + body: `{ + "name": "John Doe", + "username": "john.doe", + "email": "john.doe@example.com", + "password": "password" + }`, + requiredMocks: func() { + servicesMock.On("SetupVerify", mock.Anything, "value").Return(nil).Once() + + servicesMock.On("Setup", mock.Anything, requests.Setup{ + Name: "John Doe", + Username: "john.doe", + Email: "john.doe@example.com", + Password: "password", + }).Return(nil).Once() + }, + expected: http.StatusOK, + }, + } + + for _, test := range tests { + t.Run(test.description, func(t *testing.T) { + test.requiredMocks() + + req := httptest.NewRequest(http.MethodPost, fmt.Sprintf("%s%s", "/api/setup", test.queries), strings.NewReader(test.body)) + req.Header.Set("Content-Type", "application/json") + rec := 
httptest.NewRecorder() + + router := NewRouter(servicesMock) + router.ServeHTTP(rec, req) + + result := rec.Result() + + assert.Equal(t, test.expected, result.StatusCode) + }) + } + + envMock.AssertExpectations(t) +} diff --git a/api/routes/sshkeys.go b/api/routes/sshkeys.go index 6fe040b278b..651648746ca 100644 --- a/api/routes/sshkeys.go +++ b/api/routes/sshkeys.go @@ -2,28 +2,23 @@ package routes import ( "net/http" + "net/url" "strconv" "github.com/shellhub-io/shellhub/api/pkg/gateway" - "github.com/shellhub-io/shellhub/api/pkg/guard" "github.com/shellhub-io/shellhub/api/store" - "github.com/shellhub-io/shellhub/pkg/api/paginator" "github.com/shellhub-io/shellhub/pkg/api/requests" - "github.com/shellhub-io/shellhub/pkg/api/responses" "github.com/shellhub-io/shellhub/pkg/models" ) const ( - GetPublicKeysURL = "/sshkeys/public-keys" - GetPublicKeyURL = "/sshkeys/public-keys/:fingerprint/:tenant" - CreatePublicKeyURL = "/sshkeys/public-keys" - UpdatePublicKeyURL = "/sshkeys/public-keys/:fingerprint" - DeletePublicKeyURL = "/sshkeys/public-keys/:fingerprint" - CreatePrivateKeyURL = "/sshkeys/private-keys" - EvaluateKeyURL = "/sshkeys/public-keys/evaluate/:fingerprint/:username" - AddPublicKeyTagURL = "/sshkeys/public-keys/:fingerprint/tags" // Add a tag to a public key. - RemovePublicKeyTagURL = "/sshkeys/public-keys/:fingerprint/tags/:tag" // Remove a tag to a public key. - UpdatePublicKeyTagsURL = "/sshkeys/public-keys/:fingerprint/tags" // Update all tags from a public key. 
+ GetPublicKeysURL = "/sshkeys/public-keys" + GetPublicKeyURL = "/sshkeys/public-keys/:fingerprint/:tenant" + CreatePublicKeyURL = "/sshkeys/public-keys" + UpdatePublicKeyURL = "/sshkeys/public-keys/:fingerprint" + DeletePublicKeyURL = "/sshkeys/public-keys/:fingerprint" + CreatePrivateKeyURL = "/sshkeys/private-keys" + EvaluateKeyURL = "/sshkeys/public-keys/evaluate/:fingerprint/:username" ) const ( @@ -31,15 +26,20 @@ const ( ) func (h *Handler) GetPublicKeys(c gateway.Context) error { - query := paginator.NewQuery() - if err := c.Bind(query); err != nil { + req := new(requests.ListPublicKeys) + + if err := c.Bind(req); err != nil { + return err + } + + if err := c.Validate(req); err != nil { return err } // TODO: normalize is not required when request is privileged - query.Normalize() + req.Paginator.Normalize() - list, count, err := h.service.ListPublicKeys(c.Ctx(), *query) + list, count, err := h.service.ListPublicKeys(c.Ctx(), req) if err != nil { return err } @@ -89,13 +89,7 @@ func (h *Handler) CreatePublicKey(c gateway.Context) error { req.TenantID = tenant } - var res *responses.PublicKeyCreate - err := guard.EvaluatePermission(c.Role(), guard.Actions.PublicKey.Create, func() error { - var err error - res, err = h.service.CreatePublicKey(c.Ctx(), req, tenant) - - return err - }) + res, err := h.service.CreatePublicKey(c.Ctx(), req, tenant) if err != nil { return err } @@ -118,18 +112,12 @@ func (h *Handler) UpdatePublicKey(c gateway.Context) error { tenant = c.Tenant().ID } - var key *models.PublicKey - err := guard.EvaluatePermission(c.Role(), guard.Actions.PublicKey.Edit, func() error { - var err error - key, err = h.service.UpdatePublicKey(c.Ctx(), req.Fingerprint, tenant, req) - - return err - }) + res, err := h.service.UpdatePublicKey(c.Ctx(), req.Fingerprint, tenant, req) if err != nil { return err } - return c.JSON(http.StatusOK, key) + return c.JSON(http.StatusOK, res) } func (h *Handler) DeletePublicKey(c gateway.Context) error { @@ -138,6 
+126,10 @@ func (h *Handler) DeletePublicKey(c gateway.Context) error { return err } + // NOTE: This is a temporary workaround. + // TODO: Investigate why echo is not decoding the Fingerprint. + req.Fingerprint, _ = url.QueryUnescape(req.Fingerprint) + if err := c.Validate(&req); err != nil { return err } @@ -147,12 +139,7 @@ func (h *Handler) DeletePublicKey(c gateway.Context) error { tenant = c.Tenant().ID } - err := guard.EvaluatePermission(c.Role(), guard.Actions.PublicKey.Remove, func() error { - err := h.service.DeletePublicKey(c.Ctx(), req.Fingerprint, tenant) - - return err - }) - if err != nil { + if err := h.service.DeletePublicKey(c.Ctx(), req.Fingerprint, tenant); err != nil { return err } @@ -191,78 +178,3 @@ func (h *Handler) EvaluateKey(c gateway.Context) error { return c.JSON(http.StatusOK, usernameOk && filterOk) } - -func (h *Handler) AddPublicKeyTag(c gateway.Context) error { - var req requests.PublicKeyTagAdd - if err := c.Bind(&req); err != nil { - return err - } - - if err := c.Validate(&req); err != nil { - return err - } - - var tenant string - if c.Tenant() != nil { - tenant = c.Tenant().ID - } - - err := guard.EvaluatePermission(c.Role(), guard.Actions.PublicKey.AddTag, func() error { - return h.service.AddPublicKeyTag(c.Ctx(), tenant, req.Fingerprint, req.Tag) - }) - if err != nil { - return err - } - - return c.NoContent(http.StatusOK) -} - -func (h *Handler) RemovePublicKeyTag(c gateway.Context) error { - var req requests.PublicKeyTagRemove - if err := c.Bind(&req); err != nil { - return err - } - - if err := c.Validate(&req); err != nil { - return err - } - - var tenant string - if c.Tenant() != nil { - tenant = c.Tenant().ID - } - - err := guard.EvaluatePermission(c.Role(), guard.Actions.PublicKey.RemoveTag, func() error { - return h.service.RemovePublicKeyTag(c.Ctx(), tenant, req.Fingerprint, req.Tag) - }) - if err != nil { - return err - } - - return c.NoContent(http.StatusOK) -} - -func (h *Handler) UpdatePublicKeyTags(c 
gateway.Context) error { - var req requests.PublicKeyTagsUpdate - if err := c.Bind(&req); err != nil { - return err - } - - if err := c.Validate(&req); err != nil { - return err - } - - var tenant string - if c.Tenant() != nil { - tenant = c.Tenant().ID - } - - err := guard.EvaluatePermission(c.Role(), guard.Actions.PublicKey.UpdateTag, func() error { - return h.service.UpdatePublicKeyTags(c.Ctx(), tenant, req.Fingerprint, req.Tags) - }) - if err != nil { - return err - } - - return c.NoContent(http.StatusOK) -} diff --git a/api/routes/sshkeys_test.go b/api/routes/sshkeys_test.go index 4be9a01471b..65d79434f3c 100644 --- a/api/routes/sshkeys_test.go +++ b/api/routes/sshkeys_test.go @@ -9,10 +9,10 @@ import ( "strings" "testing" - "github.com/shellhub-io/shellhub/api/pkg/guard" svc "github.com/shellhub-io/shellhub/api/services" "github.com/shellhub-io/shellhub/api/services/mocks" - "github.com/shellhub-io/shellhub/pkg/api/paginator" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" + "github.com/shellhub-io/shellhub/pkg/api/query" "github.com/shellhub-io/shellhub/pkg/api/requests" "github.com/shellhub-io/shellhub/pkg/models" "github.com/stretchr/testify/assert" @@ -27,19 +27,21 @@ func TestGetPublicKeys(t *testing.T) { expectedStatus int } cases := []struct { - title string - query paginator.Query - requiredMocks func(query paginator.Query) + description string + paginator query.Paginator + headers map[string]string + requiredMocks func() expected Expected }{ { - title: "success when try to list a publics keys exists", - query: paginator.Query{ - Page: 1, - PerPage: 10, - }, - requiredMocks: func(query paginator.Query) { - mock.On("ListPublicKeys", gomock.Anything, query).Return([]models.PublicKey{}, 1, nil) + description: "success when try to list a publics keys exists", + paginator: query.Paginator{Page: 1, PerPage: 10}, + headers: map[string]string{"X-Tenant-ID": "00000000-0000-4000-0000-000000000000"}, + requiredMocks: func() { + mock. 
+ On("ListPublicKeys", gomock.Anything, &requests.ListPublicKeys{Paginator: query.Paginator{Page: 1, PerPage: 10}, TenantID: "00000000-0000-4000-0000-000000000000"}). + Return([]models.PublicKey{}, 1, nil). + Once() }, expected: Expected{ expectedSession: []models.PublicKey{}, @@ -49,17 +51,21 @@ func TestGetPublicKeys(t *testing.T) { } for _, tc := range cases { - t.Run(tc.title, func(t *testing.T) { - tc.requiredMocks(tc.query) + t.Run(tc.description, func(t *testing.T) { + tc.requiredMocks() - jsonData, err := json.Marshal(tc.query) + jsonData, err := json.Marshal(tc.paginator) if err != nil { assert.NoError(t, err) } req := httptest.NewRequest(http.MethodGet, "/api/sshkeys/public-keys", strings.NewReader(string(jsonData))) req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) + req.Header.Set("X-Role", authorizer.RoleOwner.String()) + for k, v := range tc.headers { + req.Header.Set(k, v) + } + rec := httptest.NewRecorder() e := NewRouter(mock) @@ -92,10 +98,10 @@ func TestGetPublicKey(t *testing.T) { { title: "fails when validate because the tag does not have a min of 3 characters", query: requests.PublicKeyGet{ - TenantParam: requests.TenantParam{Tenant: "tg"}, + TenantParam: requests.TenantParam{Tenant: "00000000-0000-4000-0000-000000000000"}, }, expected: Expected{expectedStatus: http.StatusBadRequest}, - requiredMocks: func(req requests.PublicKeyGet) {}, + requiredMocks: func(_ requests.PublicKeyGet) {}, }, { title: "fails when validate because the tag does not have a max of 255 characters", @@ -103,7 +109,7 @@ func TestGetPublicKey(t *testing.T) { TenantParam: requests.TenantParam{Tenant: "BCD3821E12F7A6D89295D86E277F2C365D7A4C3FCCD75D8A2F46C0A556A8EBAAF0845C85D50241FC2F9806D8668FF75D262FDA0A055784AD36D8CA7D2BB600C9BCD3821E12F7A6D89295D86E277F2C365D7A4C3FCCD75D8A2F46C0A556A8EBAAF0845C85D50241FC2F9806D8668FF75D262FDA0A055784AD36D8CA7D2BB600C9"}, }, expected: Expected{expectedStatus: http.StatusBadRequest}, - 
requiredMocks: func(req requests.PublicKeyGet) {}, + requiredMocks: func(_ requests.PublicKeyGet) {}, }, { title: "fails when validate because have a '/' with in your characters", @@ -111,7 +117,7 @@ func TestGetPublicKey(t *testing.T) { TenantParam: requests.TenantParam{Tenant: "test/"}, }, expected: Expected{expectedStatus: http.StatusBadRequest}, - requiredMocks: func(req requests.PublicKeyGet) {}, + requiredMocks: func(_ requests.PublicKeyGet) {}, }, { title: "fails when validate because have a '&' with in your characters", @@ -119,7 +125,7 @@ func TestGetPublicKey(t *testing.T) { TenantParam: requests.TenantParam{Tenant: "test&"}, }, expected: Expected{expectedStatus: http.StatusBadRequest}, - requiredMocks: func(req requests.PublicKeyGet) {}, + requiredMocks: func(_ requests.PublicKeyGet) {}, }, { title: "fails when validate because have a '@' with in your characters", @@ -127,13 +133,13 @@ func TestGetPublicKey(t *testing.T) { TenantParam: requests.TenantParam{Tenant: "test@"}, }, expected: Expected{expectedStatus: http.StatusBadRequest}, - requiredMocks: func(req requests.PublicKeyGet) {}, + requiredMocks: func(_ requests.PublicKeyGet) {}, }, { title: "success when try to get a public key exists", query: requests.PublicKeyGet{ - FingerprintParam: requests.FingerprintParam{Fingerprint: "figertest"}, - TenantParam: requests.TenantParam{Tenant: "tenant"}, + FingerprintParam: requests.FingerprintParam{Fingerprint: "fingerprint"}, + TenantParam: requests.TenantParam{Tenant: "00000000-0000-4000-0000-000000000000"}, }, requiredMocks: func(query requests.PublicKeyGet) { mock.On("GetPublicKey", gomock.Anything, query.Fingerprint, query.Tenant).Return(&models.PublicKey{}, nil) @@ -156,7 +162,7 @@ func TestGetPublicKey(t *testing.T) { req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("/internal/sshkeys/public-keys/%s/%s", tc.query.Fingerprint, tc.query.Tenant), strings.NewReader(string(jsonData))) req.Header.Set("Content-Type", "application/json") - 
req.Header.Set("X-Role", guard.RoleOwner) + req.Header.Set("X-Role", authorizer.RoleOwner.String()) rec := httptest.NewRecorder() e := NewRouter(mock) @@ -174,232 +180,113 @@ func TestGetPublicKey(t *testing.T) { } func TestDeletePublicKey(t *testing.T) { - mock := new(mocks.Service) - - cases := []struct { - title string - query requests.PublicKeyDelete - requiredMocks func(query requests.PublicKeyDelete) - expectedStatus int - }{ - { - title: "fails when bind fails to validate uid", - query: requests.PublicKeyDelete{ - FingerprintParam: requests.FingerprintParam{Fingerprint: ""}, - }, - requiredMocks: func(query requests.PublicKeyDelete) {}, - expectedStatus: http.StatusNotFound, - }, - { - title: "fails when try to deleting an existing public key", - query: requests.PublicKeyDelete{ - FingerprintParam: requests.FingerprintParam{Fingerprint: "figertest"}, - }, - requiredMocks: func(query requests.PublicKeyDelete) { - mock.On("DeletePublicKey", gomock.Anything, query.Fingerprint, "tenant").Return(svc.ErrNotFound).Once() - }, - expectedStatus: http.StatusNotFound, - }, - { - title: "success when try to deleting an existing public key", - query: requests.PublicKeyDelete{ - FingerprintParam: requests.FingerprintParam{Fingerprint: "figertest"}, - }, - requiredMocks: func(query requests.PublicKeyDelete) { - mock.On("DeletePublicKey", gomock.Anything, query.Fingerprint, "tenant").Return(nil).Once() - }, - expectedStatus: http.StatusOK, - }, - } - - for _, tc := range cases { - t.Run(tc.title, func(t *testing.T) { - tc.requiredMocks(tc.query) - - req := httptest.NewRequest(http.MethodDelete, fmt.Sprintf("/api/sshkeys/public-keys/%s", tc.query.Fingerprint), nil) - req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) - req.Header.Set("X-Tenant-ID", "tenant") - rec := httptest.NewRecorder() - - e := NewRouter(mock) - e.ServeHTTP(rec, req) - - assert.Equal(t, tc.expectedStatus, rec.Result().StatusCode) - }) + type Expected struct { + 
status int } -} -func TestRemovePublicKeyTag(t *testing.T) { - mock := new(mocks.Service) + svcMock := new(mocks.Service) cases := []struct { - title string - query requests.PublicKeyTagRemove - tenant string - requiredMocks func(query requests.PublicKeyTagRemove) - expectedStatus int + description string + fingerprint string + headers map[string]string + requiredMocks func() + expected Expected }{ { - title: "fails when validate because the tag does not have a min of 3 characters", - query: requests.PublicKeyTagRemove{ - TagParam: requests.TagParam{Tag: "tg"}, - }, - expectedStatus: http.StatusBadRequest, - requiredMocks: func(req requests.PublicKeyTagRemove) {}, - }, - { - title: "fails when validate because the tag does not have a max of 255 characters", - query: requests.PublicKeyTagRemove{ - TagParam: requests.TagParam{Tag: "BCD3821E12F7A6D89295D86E277F2C365D7A4C3FCCD75D8A2F46C0A556A8EBAAF0845C85D50241FC2F9806D8668FF75D262FDA0A055784AD36D8CA7D2BB600C9BCD3821E12F7A6D89295D86E277F2C365D7A4C3FCCD75D8A2F46C0A556A8EBAAF0845C85D50241FC2F9806D8668FF75D262FDA0A055784AD36D8CA7D2BB600C9"}, - }, - expectedStatus: http.StatusBadRequest, - requiredMocks: func(req requests.PublicKeyTagRemove) {}, - }, - { - title: "fails when validate because have a '/' with in your characters", - query: requests.PublicKeyTagRemove{ - TagParam: requests.TagParam{Tag: "test/"}, - }, - expectedStatus: http.StatusBadRequest, - requiredMocks: func(req requests.PublicKeyTagRemove) {}, - }, - { - title: "fails when validate because have a '&' with in your characters", - query: requests.PublicKeyTagRemove{ - TagParam: requests.TagParam{Tag: "test&"}, + description: "fails when role is observer", + fingerprint: "8e:b3:e2:ce:3c:6c:27:ff:51:c9:5d:77:af:92:2f:d8", + headers: map[string]string{ + "Content-Type": "application/json", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "observer", + "X-ID": "000000000000000000000000", }, - expectedStatus: http.StatusBadRequest, - 
requiredMocks: func(req requests.PublicKeyTagRemove) {}, - }, - { - title: "fails when validate because have a '@' with in your characters", - query: requests.PublicKeyTagRemove{ - TagParam: requests.TagParam{Tag: "test@"}, + requiredMocks: func() { }, - expectedStatus: http.StatusBadRequest, - requiredMocks: func(req requests.PublicKeyTagRemove) {}, + expected: Expected{status: http.StatusForbidden}, }, { - title: "success when try to removing an existing public key", - query: requests.PublicKeyTagRemove{ - FingerprintParam: requests.FingerprintParam{Fingerprint: "figertest"}, - TagParam: requests.TagParam{Tag: "tag"}, + description: "fails when role is operator", + fingerprint: "8e:b3:e2:ce:3c:6c:27:ff:51:c9:5d:77:af:92:2f:d8", + headers: map[string]string{ + "Content-Type": "application/json", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "operator", + "X-ID": "000000000000000000000000", }, - tenant: "tenant-id", - requiredMocks: func(query requests.PublicKeyTagRemove) { - mock.On("RemovePublicKeyTag", gomock.Anything, "tenant-id", query.Fingerprint, query.Tag).Return(nil) - }, - expectedStatus: http.StatusOK, - }, - } - - for _, tc := range cases { - t.Run(tc.title, func(t *testing.T) { - tc.requiredMocks(tc.query) - - jsonData, err := json.Marshal(tc.query) - if err != nil { - assert.NoError(t, err) - } - - req := httptest.NewRequest(http.MethodDelete, fmt.Sprintf("/api/sshkeys/public-keys/%s/tags/%s", tc.query.Fingerprint, tc.query.Tag), strings.NewReader(string(jsonData))) - req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) - req.Header.Set("X-Tenant-ID", tc.tenant) - rec := httptest.NewRecorder() - - e := NewRouter(mock) - e.ServeHTTP(rec, req) - - assert.Equal(t, tc.expectedStatus, rec.Result().StatusCode) - }) - } -} - -func TestAddPublicKeyTagURL(t *testing.T) { - mock := new(mocks.Service) - - cases := []struct { - title string - query requests.PublicKeyTagAdd - tenant string - 
requiredMocks func(query requests.PublicKeyTagAdd) - expectedStatus int - }{ - { - title: "fails when validate because the tag does not have a min of 3 characters", - query: requests.PublicKeyTagAdd{ - TagParam: requests.TagParam{Tag: "tg"}, + requiredMocks: func() { }, - expectedStatus: http.StatusBadRequest, - requiredMocks: func(req requests.PublicKeyTagAdd) {}, + expected: Expected{status: http.StatusForbidden}, }, { - title: "fails when validate because the tag does not have a max of 255 characters", - query: requests.PublicKeyTagAdd{ - TagParam: requests.TagParam{Tag: "BCD3821E12F7A6D89295D86E277F2C365D7A4C3FCCD75D8A2F46C0A556A8EBAAF0845C85D50241FC2F9806D8668FF75D262FDA0A055784AD36D8CA7D2BB600C9BCD3821E12F7A6D89295D86E277F2C365D7A4C3FCCD75D8A2F46C0A556A8EBAAF0845C85D50241FC2F9806D8668FF75D262FDA0A055784AD36D8CA7D2BB600C9"}, + description: "fails when try to deleting a non existent public key", + fingerprint: "8e:b3:e2:ce:3c:6c:27:ff:51:c9:5d:77:af:92:2f:d8", + headers: map[string]string{ + "Content-Type": "application/json", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "owner", + "X-ID": "000000000000000000000000", }, - expectedStatus: http.StatusBadRequest, - requiredMocks: func(req requests.PublicKeyTagAdd) {}, - }, - { - title: "fails when validate because have a '/' with in your characters", - query: requests.PublicKeyTagAdd{ - TagParam: requests.TagParam{Tag: "test/"}, + requiredMocks: func() { + svcMock. + On("DeletePublicKey", gomock.Anything, "8e:b3:e2:ce:3c:6c:27:ff:51:c9:5d:77:af:92:2f:d8", "00000000-0000-4000-0000-000000000000"). + Return(svc.ErrNotFound). 
+ Once() }, - expectedStatus: http.StatusBadRequest, - requiredMocks: func(req requests.PublicKeyTagAdd) {}, + expected: Expected{status: http.StatusNotFound}, }, { - title: "fails when validate because have a '&' with in your characters", - query: requests.PublicKeyTagAdd{ - TagParam: requests.TagParam{Tag: "test&"}, + description: "success when fingerprint is encoded", + fingerprint: "8e%3Ab3%3Ae2%3Ace%3A3c%3A6c%3A27%3Aff%3A51%3Ac9%3A5d%3A77%3Aaf%3A92%3A2f%3Ad8", + headers: map[string]string{ + "Content-Type": "application/json", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "owner", + "X-ID": "000000000000000000000000", }, - expectedStatus: http.StatusBadRequest, - requiredMocks: func(req requests.PublicKeyTagAdd) {}, - }, - { - title: "fails when validate because have a '@' with in your characters", - query: requests.PublicKeyTagAdd{ - TagParam: requests.TagParam{Tag: "test@"}, + requiredMocks: func() { + svcMock. + On("DeletePublicKey", gomock.Anything, "8e:b3:e2:ce:3c:6c:27:ff:51:c9:5d:77:af:92:2f:d8", "00000000-0000-4000-0000-000000000000"). + Return(nil). 
+ Once() }, - expectedStatus: http.StatusBadRequest, - requiredMocks: func(req requests.PublicKeyTagAdd) {}, + expected: Expected{status: http.StatusOK}, }, { - title: "success when try to add an existing public tag key", - query: requests.PublicKeyTagAdd{ - FingerprintParam: requests.FingerprintParam{Fingerprint: "figertest"}, - TagParam: requests.TagParam{Tag: "tag"}, + description: "success when try to deleting an existing public key", + fingerprint: "8e:b3:e2:ce:3c:6c:27:ff:51:c9:5d:77:af:92:2f:d8", + headers: map[string]string{ + "Content-Type": "application/json", + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + "X-Role": "owner", + "X-ID": "000000000000000000000000", }, - tenant: "tenant-id", - requiredMocks: func(query requests.PublicKeyTagAdd) { - mock.On("AddPublicKeyTag", gomock.Anything, "tenant-id", query.Fingerprint, query.Tag).Return(nil).Once() + requiredMocks: func() { + svcMock. + On("DeletePublicKey", gomock.Anything, "8e:b3:e2:ce:3c:6c:27:ff:51:c9:5d:77:af:92:2f:d8", "00000000-0000-4000-0000-000000000000"). + Return(nil). 
+ Once() }, - expectedStatus: http.StatusOK, + expected: Expected{status: http.StatusOK}, }, } for _, tc := range cases { - t.Run(tc.title, func(t *testing.T) { - tc.requiredMocks(tc.query) + t.Run(tc.description, func(t *testing.T) { + tc.requiredMocks() - jsonData, err := json.Marshal(tc.query) - if err != nil { - assert.NoError(t, err) + req := httptest.NewRequest(http.MethodDelete, fmt.Sprintf("/api/sshkeys/public-keys/%s", tc.fingerprint), nil) + for k, v := range tc.headers { + req.Header.Set(k, v) } - req := httptest.NewRequest(http.MethodPost, fmt.Sprintf("/api/sshkeys/public-keys/%s/tags", tc.query.Fingerprint), strings.NewReader(string(jsonData))) - req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) - req.Header.Set("X-Tenant-ID", tc.tenant) rec := httptest.NewRecorder() - e := NewRouter(mock) + e := NewRouter(svcMock) e.ServeHTTP(rec, req) - assert.Equal(t, tc.expectedStatus, rec.Result().StatusCode) + assert.Equal(t, tc.expected.status, rec.Result().StatusCode) }) } } @@ -434,7 +321,7 @@ func TestCreatePrivateKey(t *testing.T) { req := httptest.NewRequest(http.MethodPost, "/internal/sshkeys/private-keys", nil) req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) + req.Header.Set("X-Role", authorizer.RoleOwner.String()) rec := httptest.NewRecorder() e := NewRouter(mock) diff --git a/api/routes/stats.go b/api/routes/stats.go index 3388a621ec5..b4acb7b9343 100644 --- a/api/routes/stats.go +++ b/api/routes/stats.go @@ -14,7 +14,16 @@ const ( ) func (h *Handler) GetStats(c gateway.Context) error { - stats, err := h.service.GetStats(c.Ctx()) + req := new(requests.GetStats) + if err := c.Bind(req); err != nil { + return err + } + + if err := c.Validate(req); err != nil { + return err + } + + stats, err := h.service.GetStats(c.Ctx(), req) if err != nil { return err } @@ -23,9 +32,8 @@ func (h *Handler) GetStats(c gateway.Context) error { } func (h *Handler) GetSystemInfo(c 
gateway.Context) error { - var req requests.SystemGetInfo - - if err := c.Bind(&req); err != nil { + req := new(requests.GetSystemInfo) + if err := c.Bind(req); err != nil { return err } @@ -33,7 +41,7 @@ func (h *Handler) GetSystemInfo(c gateway.Context) error { req.Host = c.Request().Host } - info, err := h.service.SystemGetInfo(c.Ctx(), req) + info, err := h.service.GetSystemInfo(c.Ctx(), req) if err != nil { return err } @@ -44,28 +52,10 @@ func (h *Handler) GetSystemInfo(c gateway.Context) error { func (h *Handler) GetSystemDownloadInstallScript(c gateway.Context) error { c.Response().Writer.Header().Add("Content-Type", "text/x-shellscript") - var req requests.SystemInstallScript - - if err := c.Bind(&req); err != nil { - return err - } - - if req.Host == "" { - req.Host = c.Request().Host - } - - if req.Scheme == "" { - req.Scheme = "http" - } - - if req.ForwardedPort != "" { - req.Host = req.Host + ":" + req.ForwardedPort - } - - tmpl, data, err := h.service.SystemDownloadInstallScript(c.Ctx(), req) + data, err := h.service.SystemDownloadInstallScript(c.Ctx()) if err != nil { return err } - return tmpl.Execute(c.Response().Writer, data) + return c.String(http.StatusOK, data) } diff --git a/api/routes/stats_test.go b/api/routes/stats_test.go index e757a44f6ff..c14b57513d3 100644 --- a/api/routes/stats_test.go +++ b/api/routes/stats_test.go @@ -7,11 +7,11 @@ import ( "strings" "testing" - "github.com/shellhub-io/shellhub/api/pkg/guard" - "github.com/shellhub-io/shellhub/pkg/models" - + "github.com/shellhub-io/shellhub/api/pkg/responses" "github.com/shellhub-io/shellhub/api/services/mocks" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" "github.com/shellhub-io/shellhub/pkg/api/requests" + "github.com/shellhub-io/shellhub/pkg/models" "github.com/stretchr/testify/assert" gomock "github.com/stretchr/testify/mock" ) @@ -21,22 +21,25 @@ func TestGetSystemInfo(t *testing.T) { cases := []struct { title string - request requests.SystemGetInfo - requiredMocks 
func(updatePayloadMock requests.SystemGetInfo) + request requests.GetSystemInfo + requiredMocks func(updatePayloadMock requests.GetSystemInfo) expectedStatus int }{ { title: "success when try to get infos of a existing system", - request: requests.SystemGetInfo{ + request: requests.GetSystemInfo{ Host: "example.com", Port: 0, }, - requiredMocks: func(updatePayloadMock requests.SystemGetInfo) { - mock.On("SystemGetInfo", gomock.Anything, requests.SystemGetInfo{ - Host: "example.com", - Port: 0, - }, - ).Return(&models.SystemInfo{}, nil) + requiredMocks: func(_ requests.GetSystemInfo) { + mock. + On( + "GetSystemInfo", + gomock.Anything, + &requests.GetSystemInfo{Host: "example.com", Port: 0}, + ). + Return(&responses.SystemInfo{}, nil). + Once() }, expectedStatus: http.StatusOK, }, @@ -53,7 +56,7 @@ func TestGetSystemInfo(t *testing.T) { req := httptest.NewRequest(http.MethodGet, "/api/info", strings.NewReader(string(jsonData))) req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) + req.Header.Set("X-Role", authorizer.RoleOwner.String()) rec := httptest.NewRecorder() e := NewRouter(mock) @@ -72,11 +75,12 @@ func TestGetStats(t *testing.T) { cases := []struct { title string reqStats *models.Stats + headers map[string]string expectedStatus int requiredMocks func() }{ { - title: "success when try to get an stats", + title: "success when try to get stats without tenantID", reqStats: &models.Stats{ RegisteredDevices: 10, OnlineDevices: 5, @@ -84,8 +88,31 @@ func TestGetStats(t *testing.T) { PendingDevices: 3, RejectedDevices: 2, }, + headers: map[string]string{ + "Content-Type": "application/json", + "X-Role": authorizer.RoleOwner.String(), + }, requiredMocks: func() { - mock.On("GetStats", gomock.Anything).Return(&models.Stats{}, nil) + mock.On("GetStats", gomock.Anything, &requests.GetStats{TenantID: ""}).Return(&models.Stats{}, nil) + }, + expectedStatus: http.StatusOK, + }, + { + title: "success when try to get stats with 
tenantID", + reqStats: &models.Stats{ + RegisteredDevices: 5, + OnlineDevices: 2, + ActiveSessions: 10, + PendingDevices: 1, + RejectedDevices: 0, + }, + headers: map[string]string{ + "Content-Type": "application/json", + "X-Role": authorizer.RoleOwner.String(), + "X-Tenant-ID": "00000000-0000-4000-0000-000000000000", + }, + requiredMocks: func() { + mock.On("GetStats", gomock.Anything, &requests.GetStats{TenantID: "00000000-0000-4000-0000-000000000000"}).Return(&models.Stats{}, nil) }, expectedStatus: http.StatusOK, }, @@ -97,8 +124,10 @@ func TestGetStats(t *testing.T) { req := httptest.NewRequest(http.MethodGet, "/api/stats", nil) - req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) + for key, value := range tc.headers { + req.Header.Set(key, value) + } + rec := httptest.NewRecorder() e := NewRouter(mock) diff --git a/api/routes/tags.go b/api/routes/tags.go index 45fb91797bd..8b8f0c2e538 100644 --- a/api/routes/tags.go +++ b/api/routes/tags.go @@ -5,79 +5,152 @@ import ( "strconv" "github.com/shellhub-io/shellhub/api/pkg/gateway" - "github.com/shellhub-io/shellhub/api/pkg/guard" + "github.com/shellhub-io/shellhub/api/store" "github.com/shellhub-io/shellhub/pkg/api/requests" ) const ( - // GetTagsURL gets all tags from all collections. - GetTagsURL = "/tags" - // RenameTagURL renames a tag in all collections. - RenameTagURL = "/tags/:tag" - // DeleteTagsURL deletes a tag from all collections. 
- DeleteTagsURL = "/tags/:tag" + URLGetTags = "/tags" + URLCreateTag = "/tags" + URLUpdateTag = "/tags/:name" + URLDeleteTag = "/tags/:name" + URLPushTagToDevice = "/devices/:uid/tags/:name" + URLPullTagFromDevice = "/devices/:uid/tags/:name" + + URLOldGetTags = "/namespaces/:tenant/tags" + URLOldCreateTag = "/namespaces/:tenant/tags" + URLOldUpdateTag = "/namespaces/:tenant/tags/:name" + URLOldDeleteTag = "/namespaces/:tenant/tags/:name" + URLOldPushTagToDevice = "/namespaces/:tenant/devices/:uid/tags/:name" + URLOldPullTagFromDevice = "/namespaces/:tenant/devices/:uid/tags/:name" ) +func (h *Handler) CreateTag(c gateway.Context) error { + req := new(requests.CreateTag) + + if err := c.Bind(req); err != nil { + return err + } + + if err := c.Validate(req); err != nil { + return err + } + + insertedID, conflicts, err := h.service.CreateTag(c.Ctx(), req) + switch { + case len(conflicts) > 0: + return c.JSON(http.StatusConflict, map[string][]string{"conflicts": conflicts}) + case err != nil: + return err + default: + c.Response().Header().Add("X-Inserted-ID", insertedID) + + return c.NoContent(http.StatusOK) + } +} + func (h *Handler) GetTags(c gateway.Context) error { - var tenant string - if t := c.Tenant(); t != nil { - tenant = t.ID + req := new(requests.ListTags) + + if err := c.Bind(req); err != nil { + return err + } + + if err := c.Validate(req); err != nil { + return err + } + + if err := req.Filters.Unmarshal(); err != nil { + return err } - tags, count, err := h.service.GetTags(c.Ctx(), tenant) + req.Paginator.Normalize() + req.Sorter.Normalize() + + tags, totalCount, err := h.service.ListTags(c.Ctx(), req) if err != nil { return err } - c.Response().Header().Set("X-Total-Count", strconv.Itoa(count)) + c.Response().Header().Set("X-Total-Count", strconv.Itoa(totalCount)) return c.JSON(http.StatusOK, tags) } -func (h *Handler) RenameTag(c gateway.Context) error { - var req requests.TagRename - var tenant string - if t := c.Tenant(); t != nil { - tenant = 
t.ID +func (h *Handler) UpdateTag(c gateway.Context) error { + req := new(requests.UpdateTag) + + if err := c.Bind(req); err != nil { + return err } - if err := c.Bind(&req); err != nil { + if err := c.Validate(req); err != nil { return err } - if err := c.Validate(&req); err != nil { + conflicts, err := h.service.UpdateTag(c.Ctx(), req) + switch { + case len(conflicts) > 0: + return c.JSON(http.StatusConflict, map[string][]string{"conflicts": conflicts}) + case err != nil: return err + default: + return c.NoContent(http.StatusOK) } +} - err := guard.EvaluatePermission(c.Role(), guard.Actions.Device.RenameTag, func() error { - return h.service.RenameTag(c.Ctx(), tenant, req.Tag, req.NewTag) - }) - if err != nil { +func (h *Handler) DeleteTag(c gateway.Context) error { + req := new(requests.DeleteTag) + + if err := c.Bind(req); err != nil { + return err + } + + if err := c.Validate(req); err != nil { + return err + } + + if err := h.service.DeleteTag(c.Ctx(), req); err != nil { return err } return c.NoContent(http.StatusOK) } -func (h *Handler) DeleteTag(c gateway.Context) error { - var req requests.TagDelete - if err := c.Bind(&req); err != nil { +func (h *Handler) PushTagToDevice(c gateway.Context) error { + req := new(requests.PushTag) + + if err := c.Bind(req); err != nil { + return err + } + + req.TargetID = c.Param("uid") + + if err := c.Validate(req); err != nil { + return err + } + + if err := h.service.PushTagTo(c.Ctx(), store.TagTargetDevice, req); err != nil { return err } - if err := c.Validate(&req); err != nil { + return c.NoContent(http.StatusOK) +} + +func (h *Handler) PullTagFromDevice(c gateway.Context) error { + req := new(requests.PullTag) + + if err := c.Bind(req); err != nil { return err } - var tenant string - if t := c.Tenant(); t != nil { - tenant = t.ID + req.TargetID = c.Param("uid") + + if err := c.Validate(req); err != nil { + return err } - err := guard.EvaluatePermission(c.Role(), guard.Actions.Device.DeleteTag, func() error { - return 
h.service.DeleteTag(c.Ctx(), tenant, req.Tag) - }) - if err != nil { + if err := h.service.PullTagFrom(c.Ctx(), store.TagTargetDevice, req); err != nil { return err } diff --git a/api/routes/tags_test.go b/api/routes/tags_test.go deleted file mode 100644 index 068d7792e9f..00000000000 --- a/api/routes/tags_test.go +++ /dev/null @@ -1,282 +0,0 @@ -package routes - -import ( - "encoding/json" - "fmt" - "net/http" - "net/http/httptest" - "strings" - "testing" - - "github.com/shellhub-io/shellhub/api/pkg/guard" - "github.com/shellhub-io/shellhub/api/services/mocks" - "github.com/shellhub-io/shellhub/pkg/api/requests" - "github.com/stretchr/testify/assert" - gomock "github.com/stretchr/testify/mock" - "github.com/stretchr/testify/require" -) - -func TestGetTags(t *testing.T) { - mock := new(mocks.Service) - - cases := []struct { - title string - requiredMocks func() - expectedStatus int - }{ - { - title: "success when try to get an existing tag", - expectedStatus: http.StatusOK, - requiredMocks: func() { - mock.On("GetTags", gomock.Anything, "").Return([]string{"tag1", "tag2"}, 2, nil) - }, - }, - } - - for _, tc := range cases { - t.Run(tc.title, func(t *testing.T) { - tc.requiredMocks() - - req := httptest.NewRequest(http.MethodGet, "/api/tags", nil) - req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) - rec := httptest.NewRecorder() - - e := NewRouter(mock) - e.ServeHTTP(rec, req) - - assert.Equal(t, tc.expectedStatus, rec.Result().StatusCode) - }) - } - - mock.AssertExpectations(t) -} - -func TestRenameTag(t *testing.T) { - mock := new(mocks.Service) - - type Expected struct { - expectedTags requests.TagRename - expectedStatus int - } - cases := []struct { - title string - requiredMocks func() - expected Expected - }{ - { - title: "fails when bind fails to validate uid", - expected: Expected{ - expectedTags: requests.TagRename{ - TagParam: requests.TagParam{Tag: "oldTag"}, - }, - expectedStatus: http.StatusBadRequest, - }, 
- requiredMocks: func() {}, - }, - { - title: "fails when validate because the tag does not have a min of 3 characters", - expected: Expected{ - expectedTags: requests.TagRename{ - TagParam: requests.TagParam{Tag: "oldTag"}, - NewTag: "tg", - }, - expectedStatus: http.StatusBadRequest, - }, - requiredMocks: func() {}, - }, - { - title: "fails when validate because the tag does not have a max of 255 characters", - expected: Expected{ - expectedTags: requests.TagRename{ - TagParam: requests.TagParam{Tag: "oldTag"}, - NewTag: "BCD3821E12F7A6D89295D86E277F2C365D7A4C3FCCD75D8A2F46C0A556A8EBAAF0845C85D50241FC2F9806D8668FF75D262FDA0A055784AD36D8CA7D2BB600C9BCD3821E12F7A6D89295D86E277F2C365D7A4C3FCCD75D8A2F46C0A556A8EBAAF0845C85D50241FC2F9806D8668FF75D262FDA0A055784AD36D8CA7D2BB600C9", - }, - expectedStatus: http.StatusBadRequest, - }, - requiredMocks: func() {}, - }, - { - title: "fails when validate because have a '/' with in your characters", - expected: Expected{ - expectedTags: requests.TagRename{ - TagParam: requests.TagParam{Tag: "oldTag"}, - NewTag: "/", - }, - expectedStatus: http.StatusBadRequest, - }, - requiredMocks: func() {}, - }, - { - title: "fails when validate because have a '&' with in your characters", - expected: Expected{ - expectedTags: requests.TagRename{ - TagParam: requests.TagParam{Tag: "oldTag"}, - NewTag: "&", - }, - expectedStatus: http.StatusBadRequest, - }, - requiredMocks: func() {}, - }, - { - title: "fails when validate because have a '@' with in your characters", - expected: Expected{ - expectedTags: requests.TagRename{ - TagParam: requests.TagParam{Tag: "oldTag"}, - NewTag: "@", - }, - expectedStatus: http.StatusBadRequest, - }, - requiredMocks: func() {}, - }, - { - title: "success when try to renaming an existing tag", - expected: Expected{ - expectedTags: requests.TagRename{ - TagParam: requests.TagParam{Tag: "oldTag"}, - NewTag: "newTag", - }, - expectedStatus: http.StatusOK, - }, - requiredMocks: func() { - mock.On("RenameTag", 
gomock.Anything, "", "oldTag", "newTag").Return(nil) - }, - }, - } - - for _, tc := range cases { - t.Run(tc.title, func(t *testing.T) { - tc.requiredMocks() - jsonData, err := json.Marshal(tc.expected.expectedTags) - require.NoError(t, err) - - req := httptest.NewRequest(http.MethodPut, fmt.Sprintf("/api/tags/%s", tc.expected.expectedTags.Tag), strings.NewReader(string(jsonData))) - req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) - rec := httptest.NewRecorder() - - e := NewRouter(mock) - e.ServeHTTP(rec, req) - - assert.Equal(t, tc.expected.expectedStatus, rec.Result().StatusCode) - }) - } - - mock.AssertExpectations(t) -} - -func TestDeleteTag(t *testing.T) { - mock := new(mocks.Service) - - type Expected struct { - expectedTags requests.TagDelete - expectedStatus int - } - cases := []struct { - title string - requiredMocks func() - tenant string - expected Expected - }{ - { - title: "fails when bind fails to validate uid", - expected: Expected{ - expectedTags: requests.TagDelete{ - TagParam: requests.TagParam{Tag: ""}, - }, - - expectedStatus: http.StatusBadRequest, - }, - requiredMocks: func() {}, - }, - { - title: "fails when validate because the tag does not have a min of 3 characters", - expected: Expected{ - expectedTags: requests.TagDelete{ - TagParam: requests.TagParam{Tag: "tg"}, - }, - - expectedStatus: http.StatusBadRequest, - }, - requiredMocks: func() {}, - }, - { - title: "fails when validate because the tag does not have a max of 255 characters", - expected: Expected{ - expectedTags: requests.TagDelete{ - TagParam: requests.TagParam{Tag: "BCD3821E12F7A6D89295D86E277F2C365D7A4C3FCCD75D8A2F46C0A556A8EBAAF0845C85D50241FC2F9806D8668FF75D262FDA0A055784AD36D8CA7D2BB600C9BCD3821E12F7A6D89295D86E277F2C365D7A4C3FCCD75D8A2F46C0A556A8EBAAF0845C85D50241FC2F9806D8668FF75D262FDA0A055784AD36D8CA7D2BB600C9"}, - }, - - expectedStatus: http.StatusBadRequest, - }, - requiredMocks: func() {}, - }, - { - title: "fails when 
validate because have a '/' with in your characters", - expected: Expected{ - expectedTags: requests.TagDelete{ - TagParam: requests.TagParam{Tag: "/"}, - }, - - expectedStatus: http.StatusBadRequest, - }, - requiredMocks: func() {}, - }, - { - title: "fails when validate because have a '&' with in your characters", - expected: Expected{ - expectedTags: requests.TagDelete{ - TagParam: requests.TagParam{Tag: "&"}, - }, - - expectedStatus: http.StatusBadRequest, - }, - requiredMocks: func() {}, - }, - { - title: "fails when validate because have a '@' with in your characters", - expected: Expected{ - expectedTags: requests.TagDelete{ - TagParam: requests.TagParam{Tag: "@"}, - }, - - expectedStatus: http.StatusBadRequest, - }, - requiredMocks: func() {}, - }, - { - title: "success when try to deleting an existing tag", - expected: Expected{ - expectedTags: requests.TagDelete{ - TagParam: requests.TagParam{Tag: "tagtest"}, - }, - expectedStatus: http.StatusOK, - }, - tenant: "tenant", - requiredMocks: func() { - mock.On("DeleteTag", gomock.Anything, "tenant", "tagtest").Return(nil) - }, - }, - } - - for _, tc := range cases { - t.Run(tc.title, func(t *testing.T) { - tc.requiredMocks() - jsonData, err := json.Marshal(tc.expected.expectedTags) - require.NoError(t, err) - - req := httptest.NewRequest(http.MethodDelete, fmt.Sprintf("/api/tags/%s", tc.expected.expectedTags), strings.NewReader(string(jsonData))) - req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) - req.Header.Set("X-Tenant-ID", tc.tenant) - rec := httptest.NewRecorder() - - e := NewRouter(mock) - e.ServeHTTP(rec, req) - - assert.Equal(t, tc.expected.expectedStatus, rec.Result().StatusCode) - }) - } - - mock.AssertExpectations(t) -} diff --git a/api/routes/user.go b/api/routes/user.go index 9a2153fcb1c..9dd6dcb097c 100644 --- a/api/routes/user.go +++ b/api/routes/user.go @@ -7,12 +7,12 @@ import ( "github.com/shellhub-io/shellhub/api/services" 
"github.com/shellhub-io/shellhub/pkg/api/requests" "github.com/shellhub-io/shellhub/pkg/errors" - "github.com/shellhub-io/shellhub/pkg/models" ) const ( - UpdateUserDataURL = "/users/:id/data" - UpdateUserPasswordURL = "/users/:id/password" //nolint:gosec + URLUpdateUser = "/users" + URLDeprecatedUpdateUser = "/users/:id/data" + URLDeprecatedUpdateUserPassword = "/users/:id/password" //nolint:gosec ) const ( @@ -20,21 +20,18 @@ const ( ParamUserName = "username" ) -func (h *Handler) UpdateUserData(c gateway.Context) error { - var req requests.UserDataUpdate - if err := c.Bind(&req); err != nil { +func (h *Handler) UpdateUser(c gateway.Context) error { + req := new(requests.UpdateUser) + + if err := c.Bind(req); err != nil { return err } - if err := c.Validate(&req); err != nil { + if err := c.Validate(req); err != nil { return err } - if fields, err := h.service.UpdateDataUser(c.Ctx(), req.ID, models.UserData{ - Name: req.Name, - Username: req.Username, - Email: req.Email, - }); err != nil { + if fields, err := h.service.UpdateUser(c.Ctx(), req); err != nil { // FIXME: API compatibility. // // The UI uses the fields with error messages to identify if it is invalid or duplicated. 
@@ -45,7 +42,11 @@ func (h *Handler) UpdateUserData(c gateway.Context) error { switch e.Code { case services.ErrCodeInvalid: - return c.JSON(http.StatusBadRequest, fields) + if len(fields) > 1 { + return c.JSON(http.StatusBadRequest, fields) + } + + return c.NoContent(http.StatusBadRequest) case services.ErrCodeDuplicated: return c.JSON(http.StatusConflict, fields) default: diff --git a/api/routes/user_test.go b/api/routes/user_test.go index aeeacbe4110..69461825c77 100644 --- a/api/routes/user_test.go +++ b/api/routes/user_test.go @@ -8,96 +8,144 @@ import ( "strings" "testing" - "github.com/shellhub-io/shellhub/api/pkg/guard" svc "github.com/shellhub-io/shellhub/api/services" "github.com/shellhub-io/shellhub/api/services/mocks" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" "github.com/shellhub-io/shellhub/pkg/api/requests" - "github.com/shellhub-io/shellhub/pkg/models" "github.com/stretchr/testify/assert" gomock "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" ) -func TestUpdateUserData(t *testing.T) { - mock := new(mocks.Service) +func TestUpdateUser(t *testing.T) { + type Expected struct { + status int + } + + svcMock := new(mocks.Service) cases := []struct { - title string - uid string - updatePayloadMock requests.UserDataUpdate - requiredMocks func(updatePayloadMock models.UserData) - expectedStatus int + description string + headers map[string]string + body requests.UpdateUser + requiredMocks func() + expected Expected }{ { - title: "fails when bind fails to validate uid", - uid: "1234", - updatePayloadMock: requests.UserDataUpdate{ - Name: "new name", - Username: "usernameteste", - Email: "newemail@example.com", + description: "fails when bind fails to validate e-mail", + headers: map[string]string{ + "X-ID": "000000000000000000000000", + "X-Role": "owner", }, - requiredMocks: func(updatePayloadMock models.UserData) {}, - expectedStatus: http.StatusBadRequest, + body: requests.UpdateUser{ + Name: "John Doe", + Username: 
"john_doe", + Email: "invalid.com", + RecoveryEmail: "invalid.com", + }, + requiredMocks: func() {}, + expected: Expected{http.StatusBadRequest}, }, { - title: "fails when try to updating a non-existing user", - uid: "1234", - updatePayloadMock: requests.UserDataUpdate{ - UserParam: requests.UserParam{ - ID: "1234", - }, - Name: "new name", - Username: "usernameteste", - Email: "newemail@example.com", + description: "fails when bind fails to validate username", + headers: map[string]string{ + "X-ID": "000000000000000000000000", + "X-Role": "owner", }, - requiredMocks: func(updatePayloadMock models.UserData) { - mock.On("UpdateDataUser", gomock.Anything, "1234", updatePayloadMock).Return(nil, svc.ErrUserNotFound) + body: requests.UpdateUser{ + Name: "John Doe", + Username: "_", + Email: "john.doe@test.com", + RecoveryEmail: "john.doe@test.com", }, - expectedStatus: http.StatusNotFound, + requiredMocks: func() {}, + expected: Expected{http.StatusBadRequest}, }, { - title: "success when try to updating an existing user", - uid: "123", - updatePayloadMock: requests.UserDataUpdate{ - UserParam: requests.UserParam{ - ID: "123", - }, - Name: "new name", - Username: "usernameteste", - Email: "newemail@example.com", + description: "fails when try to updating a non-existing user", + headers: map[string]string{ + "X-ID": "000000000000000000000000", + "X-Role": "owner", }, - requiredMocks: func(updatePayloadMock models.UserData) { - mock.On("UpdateDataUser", gomock.Anything, "123", updatePayloadMock).Return(nil, nil) + body: requests.UpdateUser{ + Name: "John Doe", + Username: "john_doe", + Email: "john.doe@test.com", + RecoveryEmail: "john.doe@test.com", }, - expectedStatus: http.StatusOK, + requiredMocks: func() { + svcMock. + On( + "UpdateUser", + gomock.Anything, + &requests.UpdateUser{ + UserID: "000000000000000000000000", + Name: "John Doe", + Username: "john_doe", + Email: "john.doe@test.com", + RecoveryEmail: "john.doe@test.com", + }, + ). 
+ Return(nil, svc.ErrUserNotFound). + Once() + }, + expected: Expected{http.StatusNotFound}, + }, + { + description: "success when try to updating an existing user", + body: requests.UpdateUser{ + Name: "John Doe", + Username: "john_doe", + Email: "john.doe@test.com", + RecoveryEmail: "john.doe@test.com", + }, + headers: map[string]string{ + "X-ID": "000000000000000000000000", + "X-Role": "owner", + }, + requiredMocks: func() { + svcMock. + On( + "UpdateUser", + gomock.Anything, + &requests.UpdateUser{ + UserID: "000000000000000000000000", + Name: "John Doe", + Username: "john_doe", + Email: "john.doe@test.com", + RecoveryEmail: "john.doe@test.com", + }, + ). + Return(nil, nil). + Once() + }, + expected: Expected{http.StatusOK}, }, } for _, tc := range cases { - t.Run(tc.title, func(t *testing.T) { - tc.requiredMocks(models.UserData{ - Name: tc.updatePayloadMock.Name, - Username: tc.updatePayloadMock.Username, - Email: tc.updatePayloadMock.Email, - }) + t.Run(tc.description, func(t *testing.T) { + tc.requiredMocks() - jsonData, err := json.Marshal(tc.updatePayloadMock) - if err != nil { - assert.NoError(t, err) - } + data, err := json.Marshal(tc.body) + require.NoError(t, err) - req := httptest.NewRequest(http.MethodPatch, fmt.Sprintf("/api/users/%s/data", tc.uid), strings.NewReader(string(jsonData))) + req := httptest.NewRequest(http.MethodPatch, "/api/users", strings.NewReader(string(data))) req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) + for k, v := range tc.headers { + req.Header.Set(k, v) + } + rec := httptest.NewRecorder() - e := NewRouter(mock) + e := NewRouter(svcMock) e.ServeHTTP(rec, req) - assert.Equal(t, tc.expectedStatus, rec.Result().StatusCode) + assert.Equal(t, tc.expected, Expected{rec.Result().StatusCode}) }) } - mock.AssertExpectations(t) + svcMock.AssertExpectations(t) } func TestUpdateUserPassword(t *testing.T) { @@ -118,7 +166,7 @@ func TestUpdateUserPassword(t *testing.T) { ID: "123", }, }, - 
requiredMocks: func(updatePayloadMock requests.UserPasswordUpdate) {}, + requiredMocks: func(_ requests.UserPasswordUpdate) {}, expectedStatus: http.StatusBadRequest, }, { @@ -131,7 +179,7 @@ func TestUpdateUserPassword(t *testing.T) { CurrentPassword: "fail", NewPassword: "new_password", }, - requiredMocks: func(updatePayloadMock requests.UserPasswordUpdate) {}, + requiredMocks: func(_ requests.UserPasswordUpdate) {}, expectedStatus: http.StatusBadRequest, }, { @@ -144,7 +192,7 @@ func TestUpdateUserPassword(t *testing.T) { CurrentPassword: "1a3b8f0c2e5d7g9i4k6m8o2q5s7u9w1v7", NewPassword: "new_password", }, - requiredMocks: func(updatePayloadMock requests.UserPasswordUpdate) {}, + requiredMocks: func(_ requests.UserPasswordUpdate) {}, expectedStatus: http.StatusBadRequest, }, { @@ -157,7 +205,7 @@ func TestUpdateUserPassword(t *testing.T) { CurrentPassword: "new_password", NewPassword: "fail", }, - requiredMocks: func(updatePayloadMock requests.UserPasswordUpdate) {}, + requiredMocks: func(_ requests.UserPasswordUpdate) {}, expectedStatus: http.StatusBadRequest, }, { @@ -170,7 +218,7 @@ func TestUpdateUserPassword(t *testing.T) { CurrentPassword: "new_password", NewPassword: "1a3b8f0c2e5d7g9i4k6m8o2q5s7u9w1v7", }, - requiredMocks: func(updatePayloadMock requests.UserPasswordUpdate) {}, + requiredMocks: func(_ requests.UserPasswordUpdate) {}, expectedStatus: http.StatusBadRequest, }, { @@ -183,7 +231,7 @@ func TestUpdateUserPassword(t *testing.T) { NewPassword: "duplicate", CurrentPassword: "duplicate", }, - requiredMocks: func(updatePayloadMock requests.UserPasswordUpdate) {}, + requiredMocks: func(_ requests.UserPasswordUpdate) {}, expectedStatus: http.StatusBadRequest, }, { @@ -229,7 +277,7 @@ func TestUpdateUserPassword(t *testing.T) { req := httptest.NewRequest(http.MethodPatch, fmt.Sprintf("/api/users/%s/password", tc.uid), strings.NewReader(string(jsonData))) req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Role", guard.RoleOwner) + 
req.Header.Set("X-Role", authorizer.RoleOwner.String()) rec := httptest.NewRecorder() e := NewRouter(mock) diff --git a/api/server.go b/api/server.go index cb5fdd1d3cc..193184551db 100644 --- a/api/server.go +++ b/api/server.go @@ -1,161 +1,226 @@ package main import ( - "errors" + "context" "os" + "strings" "github.com/getsentry/sentry-go" "github.com/labstack/echo/v4" - echoMiddleware "github.com/labstack/echo/v4/middleware" - "github.com/shellhub-io/shellhub/api/pkg/echo/handlers" - "github.com/shellhub-io/shellhub/api/pkg/gateway" "github.com/shellhub-io/shellhub/api/routes" + "github.com/shellhub-io/shellhub/api/routes/middleware" "github.com/shellhub-io/shellhub/api/services" - "github.com/shellhub-io/shellhub/api/store" "github.com/shellhub-io/shellhub/api/store/mongo" - "github.com/shellhub-io/shellhub/api/workers" - requests "github.com/shellhub-io/shellhub/pkg/api/internalclient" - storecache "github.com/shellhub-io/shellhub/pkg/cache" - "github.com/shellhub-io/shellhub/pkg/geoip" - "github.com/shellhub-io/shellhub/pkg/middleware" + "github.com/shellhub-io/shellhub/api/store/mongo/options" + "github.com/shellhub-io/shellhub/pkg/api/internalclient" + "github.com/shellhub-io/shellhub/pkg/cache" + "github.com/shellhub-io/shellhub/pkg/envs" + "github.com/shellhub-io/shellhub/pkg/geoip/geolite2" + "github.com/shellhub-io/shellhub/pkg/worker" + "github.com/shellhub-io/shellhub/pkg/worker/asynq" log "github.com/sirupsen/logrus" - "github.com/spf13/cobra" ) -var serverCmd = &cobra.Command{ - Use: "server", - RunE: func(cmd *cobra.Command, args []string) error { - ctx := cmd.Context() +type env struct { + // MongoURI specifies the connection string for MongoDB. + MongoURI string `env:"MONGO_URI,default=mongodb://mongo:27017/main"` - cfg, ok := ctx.Value("cfg").(*config) - if !ok { - log.Fatal("Failed to retrieve environment config from context") - } + // RedisURI specifies the connection string for Redis. 
+ RedisURI string `env:"REDIS_URI,default=redis://redis:6379"` + // RedisCachePoolSize defines the maximum number of concurrent connections to Redis cache. + // Set to 0 for unlimited connections. + RedisCachePoolSize int `env:"REDIS_CACHE_POOL_SIZE,default=0"` - log.Trace("Connecting to Redis") + // SentryDSN specifies the Data Source Name for Sentry error tracking. + // Leave empty to disable Sentry integration. + SentryDSN string `env:"SENTRY_DSN,default="` - cache, err := storecache.NewRedisCache(cfg.RedisURI) - if err != nil { - log.WithError(err).Error("Failed to configure redis store cache") - } + // AsynqGroupMaxDelay specifies the maximum time (in seconds) to wait before + // processing a group of tasks, regardless of other conditions. + AsynqGroupMaxDelay int `env:"ASYNQ_GROUP_MAX_DELAY,default=1"` + // AsynqGroupGracePeriod defines the grace period (in seconds) before task aggregation. + // Tasks arriving within this period will be aggregated with existing tasks in the group. + AsynqGroupGracePeriod int64 `env:"ASYNQ_GROUP_GRACE_PERIOD,default=2"` + // AsynqGroupMaxSize specifies the maximum number of tasks that can be aggregated in a group. + // When this limit is reached, the group will be processed immediately. + AsynqGroupMaxSize int `env:"ASYNQ_GROUP_MAX_SIZE,default=1000"` + // AsynqUniquenessTimeout defines how long (in hours) a unique job remains locked in the queue. + // If a job doesn't complete within this period, its lock is released, allowing a new instance + // to be enqueued and executed. + AsynqUniquenessTimeout int `env:"ASYNQ_UNIQUENESS_TIMEOUT,default=24"` + + // GeoipMirror specifies an alternative URL for downloading GeoIP databases. + // When configured, this takes precedence over GeoipMaxmindLicense. + GeoipMirror string `env:"MAXMIND_MIRROR,default="` + // GeoipMaxmindLicense is the MaxMind license key for downloading GeoIP databases directly. + // This is used as a fallback when GeoipMirror is not configured. 
+ GeoipMaxmindLicense string `env:"MAXMIND_LICENSE,default="` + + // Metrics enables the /metrics endpoint. + Metrics bool `env:"METRICS,default=false"` +} - log.Info("Connected to Redis") +type Server struct { + env *env + router *echo.Echo // TODO: evaluate if we can create a custom struct in router (e.g. router.Router) + worker worker.Server +} - log.Trace("Connecting to MongoDB") +// Setup initializes all server components including database connections, cache, services, API routes, and background workers. +// It prepares the server for starting but does not actually begin serving requests. +func (s *Server) Setup(ctx context.Context) error { + log.Info("Setting up server components") - store, err := mongo.NewStoreMongo(cmd.Context(), cache, cfg.MongoURI) - if err != nil { - log.WithError(err).Fatal("failed to create the store") - } + cache, err := cache.NewRedisCache(s.env.RedisURI, s.env.RedisCachePoolSize) + if err != nil { + return err + } - log.Info("Connected to MongoDB") + log.Debug("Redis cache initialized successfully") - worker, err := workers.New(store) - if err != nil { - log.WithError(err).Warn("Failed to create workers.") - } - worker.Start() + store, err := mongo.NewStore(ctx, s.env.MongoURI, cache, options.RunMigatrions) + if err != nil { + log. + WithError(err). + Fatal("failed to create the store") + } - return startServer(cfg, store, cache) - }, -} + log.Debug("MongoDB store connected successfully") -// Provides the configuration for the API service. -// The values are load from the system environment variables. -type config struct { - // MongoDB connection string (URI format) - MongoURI string `env:"MONGO_URI,default=mongodb://mongo:27017/main"` - // Redis connection string (URI format) - RedisURI string `env:"REDIS_URI,default=redis://redis:6379"` - // Enable GeoIP feature. - // - // GeoIP features enable the ability to get the logitude and latitude of the client from the IP address. - // The feature is disabled by default. 
To enable it, it is required to have a `MAXMIND` database license and feed it - // to `SHELLHUB_MAXMIND_LICENSE` with it, and `SHELLHUB_GEOIP=true`. - GeoIP bool `env:"GEOIP,default=false"` - // Session record cleanup worker schedule - SessionRecordCleanupSchedule string `env:"SESSION_RECORD_CLEANUP_SCHEDULE,default=@daily"` - // Sentry DSN. - SentryDSN string `env:"SENTRY_DSN,default="` + apiClient, err := internalclient.NewClient(nil, internalclient.WithAsynqWorker(s.env.RedisURI)) + if err != nil { + return err + } + + servicesOptions, err := s.serviceOptions(ctx) + if err != nil { + return err + } + + routerOptions, err := s.routerOptions() + if err != nil { + return err + } + + service := services.NewService(store, nil, nil, cache, apiClient, servicesOptions...) + s.router = routes.NewRouter(service, routerOptions...) + + s.worker = asynq.NewServer( + s.env.RedisURI, + asynq.BatchConfig(s.env.AsynqGroupMaxSize, s.env.AsynqGroupMaxDelay, int(s.env.AsynqGroupGracePeriod)), + asynq.UniquenessTimeout(s.env.AsynqUniquenessTimeout), + ) + + s.worker.HandleTask(services.TaskDevicesHeartbeat, service.DevicesHeartbeat(), asynq.BatchTask()) + s.worker.HandleCron(services.CronDeviceCleanup, service.DeviceCleanup(), asynq.Unique()) + + log.Info("Server setup completed successfully") + + return nil } -func init() { - if value, ok := os.LookupEnv("SHELLHUB_ENV"); ok && value == "development" { - log.SetLevel(log.TraceLevel) - log.Debug("Log level set to Trace") - } else { - log.Debug("Log level default") +// Start begins serving API requests and processing background tasks. It blocks the current goroutine until the server stops +// or encounters an error. +func (s *Server) Start() error { + log.Info("Starting server components") + + if err := s.worker.Start(); err != nil { + return err + } + + if err := s.router.Start(":8080"); err != nil { + return err } + + return nil } -// startSentry initializes the Sentry client. 
-// -// The Sentry client is used to report errors to the Sentry server, and is initialized only if the `SHELLHUB_SENTRY_DSN` -// environment variable is set. Else, the function returns a error with a not initialized Sentry client. -func startSentry(dsn string) (*sentry.Client, error) { - if dsn != "" { - var err error - reporter, err := sentry.NewClient(sentry.ClientOptions{ //nolint:exhaustruct - Dsn: dsn, - Release: os.Getenv("SHELLHUB_VERSION"), - EnableTracing: true, - TracesSampleRate: 1, - }) - if err != nil { - log.WithError(err).Error("Failed to create Sentry client") +// Shutdown gracefully terminates all server components. +func (s *Server) Shutdown() { + log.Info("Gracefully shutting down server") + + s.worker.Shutdown() + s.router.Close() // nolint: errcheck + + log.Info("Server shutdown complete") +} + +// serviceOptions returns configuration options for the application services. +func (s *Server) serviceOptions(ctx context.Context) ([]services.Option, error) { + opts := []services.Option{} + + var geoipFetcher geolite2.GeoliteFetcher + switch { + case s.env.GeoipMirror != "": + log.Info("Using custom mirror for GeoIP database") + geoipFetcher = geolite2.FetchFromMirror(s.env.GeoipMirror) + case s.env.GeoipMaxmindLicense != "": + log.Info("Using MaxMind license key for GeoIP database") + geoipFetcher = geolite2.FetchFromLicenseKey(s.env.GeoipMaxmindLicense) + } + if geoipFetcher != nil { + locator, err := geolite2.NewLocator(ctx, geoipFetcher) + if err != nil { return nil, err } - log.Info("Sentry client started") - return reporter, nil + log.Info("GeoIP locator initialized successfully") + + opts = append(opts, services.WithLocator(locator)) } - return nil, errors.New("sentry DSN not provided") + return opts, nil } -func startServer(cfg *config, store store.Store, cache storecache.Cache) error { - log.Info("Starting Sentry client") - - reporter, err := startSentry(cfg.SentryDSN) - if err != nil { - log.WithField("DSN", 
cfg.SentryDSN).WithError(err).Warn("Failed to start Sentry") - } else { - log.Info("Sentry client started") - } +// routerOptions returns configuration options for the HTTP router. +func (s *Server) routerOptions() ([]routes.Option, error) { + opts := []routes.Option{} - log.Info("Starting API server") + if s.env.SentryDSN != "" { + log.Info("Initializing Sentry error reporting") - requestClient := requests.NewClient() + sentryOpts := sentry.ClientOptions{ //nolint:exhaustruct + Dsn: s.env.SentryDSN, + Release: os.Getenv("SHELLHUB_VERSION"), + EnableTracing: true, + TracesSampleRate: 1, + } - var locator geoip.Locator - if cfg.GeoIP { - log.Info("GeoIP feature is enable") - locator, err = geoip.NewGeoLite2() + reporter, err := sentry.NewClient(sentryOpts) if err != nil { - log.WithError(err).Fatal("Failed to init GeoIP") + return nil, err } - } else { - log.Info("GeoIP is disabled") - locator = geoip.NewNullGeoLite() + + log.Info("Sentry error reporting initialized successfully") + + opts = append(opts, routes.WithReporter(reporter)) } - service := services.NewService(store, nil, nil, cache, requestClient, locator) + if s.env.Metrics { + log.Info("Enabling metrics endpoint") + + opts = append(opts, routes.WithMetrics()) + } - e := routes.NewRouter(service) - e.Use(middleware.Log) - e.Use(echoMiddleware.RequestID()) - e.HTTPErrorHandler = handlers.NewErrors(reporter) + if envs.IsDevelopment() { + log.Info("Enabling OpenAPI validation in development mode") - e.Use(func(next echo.HandlerFunc) echo.HandlerFunc { - return func(c echo.Context) error { - apicontext := gateway.NewContext(service, c) + opts = append(opts, routes.WithOpenAPIValidator(&middleware.OpenAPIValidatorConfig{ + // NOTE: By default, metrics and internal endpoints are skipped from validation for now. 
+ Skipper: func(ctx echo.Context) bool { + routes := []string{"/metrics", "/internal"} - return next(apicontext) - } - }) + for _, path := range routes { + if strings.HasPrefix(ctx.Request().URL.Path, path) { + return true + } + } - e.Logger.Fatal(e.Start(":8080")) + return false + }, + })) + } - return nil + return opts, nil } diff --git a/api/services/api-key.go b/api/services/api-key.go new file mode 100644 index 00000000000..1dbe06d8017 --- /dev/null +++ b/api/services/api-key.go @@ -0,0 +1,164 @@ +package services + +import ( + "context" + "crypto/sha256" + "encoding/hex" + "errors" + + "github.com/shellhub-io/shellhub/api/store" + "github.com/shellhub-io/shellhub/pkg/api/requests" + "github.com/shellhub-io/shellhub/pkg/api/responses" + "github.com/shellhub-io/shellhub/pkg/clock" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/shellhub-io/shellhub/pkg/uuid" +) + +type APIKeyService interface { + // CreateAPIKey creates a new API key for the specified namespace. If req.Key is empty it will generate a + // random UUID, the optional req.OptRole must be less or equal than the user's role when provided. The key + // will be hashed into an SHA256 hash. It returns the inserted UUID and an error, if any. + CreateAPIKey(ctx context.Context, req *requests.CreateAPIKey) (res *responses.CreateAPIKey, err error) + + // ListAPIKeys retrieves a list of API keys within the specified tenant ID. It returns the list of API keys, the + // total count of documents in the database, and an error, if any. + ListAPIKeys(ctx context.Context, req *requests.ListAPIKey) (apiKeys []models.APIKey, count int, err error) + + // UpdateAPIKey updates an API key with the provided tenant ID and name. It returns an error, if any. + UpdateAPIKey(ctx context.Context, req *requests.UpdateAPIKey) (err error) + + // DeleteAPIKey deletes an API key with the provided tenant ID and name. It returns an error, if any. 
+ DeleteAPIKey(ctx context.Context, req *requests.DeleteAPIKey) (err error) +} + +func (s *service) CreateAPIKey(ctx context.Context, req *requests.CreateAPIKey) (*responses.CreateAPIKey, error) { + if _, err := s.store.NamespaceResolve(ctx, store.NamespaceTenantIDResolver, req.TenantID); err != nil { + return nil, NewErrNamespaceNotFound(req.TenantID, err) + } + + expiresIn := int64(0) + switch req.ExpiresAt { + case 30, 60, 90: + expiresIn = clock.Now().AddDate(0, 0, req.ExpiresAt).Unix() + case 365: + expiresIn = clock.Now().AddDate(1, 0, 0).Unix() + case -1: + expiresIn = -1 + default: + return nil, NewErrBadRequest(errors.New("experid date to APIKey is invalid")) + } + + if req.Key == "" { + req.Key = uuid.Generate() + } + + if req.OptRole != "" { + if !req.Role.HasAuthority(req.OptRole) { + return nil, NewErrRoleInvalid() + } + + req.Role = req.OptRole + } + + // We don't store the plain key, which means we cannot save (because it is the primary key) + // the UUID with a nondeterministic hash (like bcrypt). For this reason, we convert the + // key to a SHA256 hash, which is guaranteed to be the same every time. This way, when + // retrieving the API key by the UUID, we can simply convert the UUID to a SHA256 hash and + // try to match it. + keySum := sha256.Sum256([]byte(req.Key)) + hashedKey := hex.EncodeToString(keySum[:]) + + if conflicts, has, _ := s.store.APIKeyConflicts(ctx, req.TenantID, &models.APIKeyConflicts{ID: hashedKey, Name: req.Name}); has { + return nil, NewErrAPIKeyDuplicated(conflicts) + } + + data := &models.APIKey{ + ID: hashedKey, + Name: req.Name, + TenantID: req.TenantID, + Role: req.Role, + ExpiresIn: expiresIn, + CreatedBy: req.UserID, + } + + if _, err := s.store.APIKeyCreate(ctx, data); err != nil { + return nil, err + } + + // As we need to return the plain key in the create service, we temporarily set + // the apiKey.ID to the plain key here. 
+ apiKey, _ := s.store.APIKeyResolve(ctx, store.APIKeyIDResolver, hashedKey) + apiKey.ID = req.Key + + return responses.CreateAPIKeyFromModel(apiKey), nil +} + +func (s *service) ListAPIKeys(ctx context.Context, req *requests.ListAPIKey) ([]models.APIKey, int, error) { + return s.store.APIKeyList( + ctx, + s.store.Options().InNamespace(req.TenantID), + s.store.Options().Sort(&req.Sorter), + s.store.Options().Paginate(&req.Paginator), + ) +} + +func (s *service) UpdateAPIKey(ctx context.Context, req *requests.UpdateAPIKey) error { + ns, err := s.store.NamespaceResolve(ctx, store.NamespaceTenantIDResolver, req.TenantID) + if err != nil { + return NewErrNamespaceNotFound(req.TenantID, err) + } + + // If req.Role is not empty, it must be lower than the user's role. + if req.Role != "" { + if m, ok := ns.FindMember(req.UserID); !ok || !m.Role.HasAuthority(req.Role) { + return NewErrRoleInvalid() + } + } + + apiKey, err := s.store.APIKeyResolve(ctx, store.APIKeyNameResolver, req.CurrentName, s.store.Options().InNamespace(req.TenantID)) + if err != nil { + switch { + case errors.Is(err, store.ErrNoDocuments): + return NewErrAPIKeyNotFound(req.CurrentName, err) + default: + return err + } + } + + if apiKey.Name != req.Name { + if conflicts, has, _ := s.store.APIKeyConflicts(ctx, req.TenantID, &models.APIKeyConflicts{Name: req.Name}); has { + return NewErrAPIKeyDuplicated(conflicts) + } + } + + if req.Name != "" { + apiKey.Name = req.Name + } + if string(req.Role) != "" { + apiKey.Role = req.Role + } + + if err := s.store.APIKeyUpdate(ctx, apiKey); err != nil { //nolint:revive + return err + } + + return nil +} + +func (s *service) DeleteAPIKey(ctx context.Context, req *requests.DeleteAPIKey) error { + apiKey, err := s.store.APIKeyResolve(ctx, store.APIKeyNameResolver, req.Name, s.store.Options().InNamespace(req.TenantID)) + if err != nil { + switch { + case errors.Is(err, store.ErrNoDocuments): + return NewErrAPIKeyNotFound(req.Name, err) + default: + return err + } + } + 
+ if err := s.store.APIKeyDelete(ctx, apiKey); err != nil { //nolint:revive + return err + } + + return nil +} diff --git a/api/services/api-key_test.go b/api/services/api-key_test.go new file mode 100644 index 00000000000..4c57f555eb0 --- /dev/null +++ b/api/services/api-key_test.go @@ -0,0 +1,882 @@ +package services + +import ( + "context" + "crypto/rand" + "crypto/rsa" + "crypto/sha256" + "encoding/hex" + "errors" + "testing" + + "github.com/shellhub-io/shellhub/api/store" + storemock "github.com/shellhub-io/shellhub/api/store/mocks" + "github.com/shellhub-io/shellhub/pkg/api/query" + "github.com/shellhub-io/shellhub/pkg/api/requests" + "github.com/shellhub-io/shellhub/pkg/api/responses" + storecache "github.com/shellhub-io/shellhub/pkg/cache" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/shellhub-io/shellhub/pkg/uuid" + uuidmock "github.com/shellhub-io/shellhub/pkg/uuid/mocks" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" +) + +func TestCreateAPIKey(t *testing.T) { + type Expected struct { + res *responses.CreateAPIKey + err error + } + + storeMock := new(storemock.Store) + + cases := []struct { + description string + req *requests.CreateAPIKey + requiredMocks func(context.Context) + expected Expected + }{ + { + description: "fails when namespace does not exists invalid", + req: &requests.CreateAPIKey{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "owner", + Key: "cdfd3cb0-c44e-4e54-b931-6d57713ad159", + Name: "dev", + ExpiresAt: -1, + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(nil, errors.New("error")). 
+ Once() + }, + expected: Expected{ + res: nil, + err: NewErrNamespaceNotFound("00000000-0000-4000-0000-000000000000", errors.New("error")), + }, + }, + { + description: "fails when days to expire is invalid", + req: &requests.CreateAPIKey{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "owner", + Key: "cdfd3cb0-c44e-4e54-b931-6d57713ad159", + Name: "dev", + ExpiresAt: 2, + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return( + &models.Namespace{ + Name: "namespace", + Owner: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: "owner", + }, + }, + }, + nil, + ). + Once() + }, + expected: Expected{ + res: nil, + err: NewErrBadRequest(errors.New("experid date to APIKey is invalid")), + }, + }, + { + description: "fails when opt role is greater than user's role", + req: &requests.CreateAPIKey{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "administrator", + Key: "cdfd3cb0-c44e-4e54-b931-6d57713ad159", + Name: "dev", + ExpiresAt: -1, + OptRole: "owner", + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return( + &models.Namespace{ + Name: "namespace", + Owner: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: "owner", + }, + }, + }, + nil, + ). 
+ Once() + }, + expected: Expected{ + res: nil, + err: NewErrRoleInvalid(), + }, + }, + { + description: "fails when attributes are duplicated", + req: &requests.CreateAPIKey{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "owner", + Key: "cdfd3cb0-c44e-4e54-b931-6d57713ad159", + Name: "dev", + ExpiresAt: -1, + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return( + &models.Namespace{ + Name: "namespace", + Owner: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: "owner", + }, + }, + }, + nil, + ). + Once() + + keySum := sha256.Sum256([]byte("cdfd3cb0-c44e-4e54-b931-6d57713ad159")) + hashedKey := hex.EncodeToString(keySum[:]) + + storeMock. + On("APIKeyConflicts", ctx, "00000000-0000-4000-0000-000000000000", &models.APIKeyConflicts{ID: hashedKey, Name: "dev"}). + Return([]string{"id", "name"}, true, nil). + Once() + }, + expected: Expected{ + res: nil, + err: NewErrAPIKeyDuplicated([]string{"id", "name"}), + }, + }, + { + description: "fails when unable to create the key", + req: &requests.CreateAPIKey{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "owner", + Key: "cdfd3cb0-c44e-4e54-b931-6d57713ad159", + Name: "dev", + ExpiresAt: -1, + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return( + &models.Namespace{ + Name: "namespace", + Owner: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: "owner", + }, + }, + }, + nil, + ). 
+ Once() + + keySum := sha256.Sum256([]byte("cdfd3cb0-c44e-4e54-b931-6d57713ad159")) + hashedKey := hex.EncodeToString(keySum[:]) + + storeMock. + On("APIKeyConflicts", ctx, "00000000-0000-4000-0000-000000000000", &models.APIKeyConflicts{ID: hashedKey, Name: "dev"}). + Return([]string{}, false, nil). + Once() + storeMock. + On("APIKeyCreate", ctx, &models.APIKey{ + ID: hashedKey, + Name: "dev", + CreatedBy: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "owner", + ExpiresIn: -1, + }). + Return("", errors.New("error")). + Once() + }, + expected: Expected{ + res: nil, + err: errors.New("error"), + }, + }, + { + description: "succeeds", + req: &requests.CreateAPIKey{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "owner", + Key: "cdfd3cb0-c44e-4e54-b931-6d57713ad159", + Name: "dev", + ExpiresAt: -1, + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return( + &models.Namespace{ + Name: "namespace", + Owner: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: "owner", + }, + }, + }, + nil, + ). + Once() + + uuidMock := &uuidmock.Uuid{} + uuid.DefaultBackend = uuidMock + uuidMock. + On("Generate"). + Return("cdfd3cb0-c44e-4e54-b931-6d57713ad159"). + Once() + + keySum := sha256.Sum256([]byte("cdfd3cb0-c44e-4e54-b931-6d57713ad159")) + hashedKey := hex.EncodeToString(keySum[:]) + + storeMock. + On("APIKeyConflicts", ctx, "00000000-0000-4000-0000-000000000000", &models.APIKeyConflicts{ID: hashedKey, Name: "dev"}). + Return([]string{}, false, nil). + Once() + storeMock. 
+ On("APIKeyCreate", ctx, &models.APIKey{ + ID: hashedKey, + Name: "dev", + CreatedBy: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "owner", + ExpiresIn: -1, + }). + Return(hashedKey, nil). + Once() + storeMock. + On("APIKeyResolve", ctx, store.APIKeyIDResolver, hashedKey). + Return(&models.APIKey{ + ID: hashedKey, + Name: "dev", + CreatedBy: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "owner", + ExpiresIn: -1, + }, nil). + Once() + }, + expected: Expected{ + res: &responses.CreateAPIKey{ + ID: "cdfd3cb0-c44e-4e54-b931-6d57713ad159", + Name: "dev", + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "owner", + ExpiresIn: -1, + }, + err: nil, + }, + }, + { + description: "succeeds when request key is empty", + req: &requests.CreateAPIKey{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "owner", + Key: "", + Name: "dev", + ExpiresAt: -1, + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return( + &models.Namespace{ + Name: "namespace", + Owner: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: "owner", + }, + }, + }, + nil, + ). + Once() + + uuidMock := &uuidmock.Uuid{} + uuid.DefaultBackend = uuidMock + uuidMock. + On("Generate"). + Return("1e7b0f4b-aca4-48eb-a353-7469f00665ed"). + Once() + + keySum := sha256.Sum256([]byte("1e7b0f4b-aca4-48eb-a353-7469f00665ed")) + hashedKey := hex.EncodeToString(keySum[:]) + + storeMock. + On("APIKeyConflicts", ctx, "00000000-0000-4000-0000-000000000000", &models.APIKeyConflicts{ID: hashedKey, Name: "dev"}). + Return([]string{}, false, nil). + Once() + storeMock. 
+ On("APIKeyCreate", ctx, &models.APIKey{ + ID: hashedKey, + Name: "dev", + CreatedBy: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "owner", + ExpiresIn: -1, + }). + Return(hashedKey, nil). + Once() + storeMock. + On("APIKeyResolve", ctx, store.APIKeyIDResolver, hashedKey). + Return(&models.APIKey{ + ID: hashedKey, + Name: "dev", + CreatedBy: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "owner", + ExpiresIn: -1, + }, nil). + Once() + }, + expected: Expected{ + res: &responses.CreateAPIKey{ + ID: "1e7b0f4b-aca4-48eb-a353-7469f00665ed", + Name: "dev", + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "owner", + ExpiresIn: -1, + }, + err: nil, + }, + }, + } + + privateKey, err := rsa.GenerateKey(rand.Reader, 2048) + require.NoError(t, err) + + s := NewService(storeMock, privateKey, &privateKey.PublicKey, storecache.NewNullCache(), clientMock) + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + ctx := context.Background() + tc.requiredMocks(ctx) + + res, err := s.CreateAPIKey(ctx, tc.req) + require.Equal(t, tc.expected, Expected{res, err}) + }) + } + + storeMock.AssertExpectations(t) +} + +func TestListAPIKey(t *testing.T) { + type Expected struct { + apiKeys []models.APIKey + count int + err error + } + + storeMock := new(storemock.Store) + queryOptionsMock := new(storemock.QueryOptions) + storeMock.On("Options").Return(queryOptionsMock) + + cases := []struct { + description string + tenantID string + req *requests.ListAPIKey + requiredMocks func(context.Context) + expected Expected + }{ + { + description: "fails", + req: &requests.ListAPIKey{ + TenantID: "00000000-0000-4000-0000-000000000000", + Paginator: query.Paginator{Page: 1, PerPage: 10}, + Sorter: query.Sorter{By: "expires_in", Order: query.OrderAsc}, + }, + requiredMocks: func(ctx context.Context) { + queryOptionsMock. 
+ On("InNamespace", "00000000-0000-4000-0000-000000000000"). + Return(nil). + Once() + queryOptionsMock. + On("Sort", &query.Sorter{By: "expires_in", Order: query.OrderAsc}). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 1, PerPage: 10}). + Return(nil). + Once() + storeMock. + On("APIKeyList", ctx, mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return(nil, 0, errors.New("error")). + Once() + }, + expected: Expected{ + apiKeys: nil, + count: 0, + err: errors.New("error"), + }, + }, + { + description: "succeeds", + req: &requests.ListAPIKey{ + TenantID: "00000000-0000-4000-0000-000000000000", + Paginator: query.Paginator{Page: 1, PerPage: 10}, + Sorter: query.Sorter{By: "expires_in", Order: query.OrderAsc}, + }, + requiredMocks: func(ctx context.Context) { + queryOptionsMock. + On("InNamespace", "00000000-0000-4000-0000-000000000000"). + Return(nil). + Once() + queryOptionsMock. + On("Sort", &query.Sorter{By: "expires_in", Order: query.OrderAsc}). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 1, PerPage: 10}). + Return(nil). + Once() + storeMock. + On("APIKeyList", ctx, mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return( + []models.APIKey{ + { + CreatedBy: "id", + Name: "nameAPIKey", + }, + }, + 1, + nil, + ). 
+ Once() + }, + expected: Expected{ + apiKeys: []models.APIKey{ + { + CreatedBy: "id", + Name: "nameAPIKey", + }, + }, + count: 1, + err: nil, + }, + }, + } + + privateKey, err := rsa.GenerateKey(rand.Reader, 2048) + require.NoError(t, err) + + s := NewService(storeMock, privateKey, &privateKey.PublicKey, storecache.NewNullCache(), clientMock) + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + ctx := context.Background() + tc.requiredMocks(ctx) + + apiKeys, count, err := s.ListAPIKeys(ctx, tc.req) + require.Equal(t, tc.expected, Expected{apiKeys, count, err}) + }) + } + + storeMock.AssertExpectations(t) +} + +func TestUpdateAPIKey(t *testing.T) { + storeMock := new(storemock.Store) + queryOptionsMock := new(storemock.QueryOptions) + storeMock.On("Options").Return(queryOptionsMock) + + cases := []struct { + description string + req *requests.UpdateAPIKey + requiredMocks func(context.Context) + expected error + }{ + { + description: "fails when namespaces does not exists", + req: &requests.UpdateAPIKey{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + CurrentName: "dev", + Name: "newName", + Role: "administrator", + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(nil, errors.New("error")). + Once() + }, + expected: NewErrNamespaceNotFound("00000000-0000-4000-0000-000000000000", errors.New("error")), + }, + { + description: "fails when role is greater than user's role", + req: &requests.UpdateAPIKey{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + CurrentName: "dev", + Name: "newName", + Role: "owner", + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). 
+ Return(&models.Namespace{Members: []models.Member{{ID: "000000000000000000000000", Role: "administrator"}}}, nil). + Once() + }, + expected: NewErrRoleInvalid(), + }, + { + description: "fails when api key does not exist for resolve", + req: &requests.UpdateAPIKey{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + CurrentName: "nonexistent", + Name: "newName", + Role: "administrator", + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{Members: []models.Member{{ID: "000000000000000000000000", Role: "owner"}}}, nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-4000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("APIKeyResolve", ctx, store.APIKeyNameResolver, "nonexistent", mock.AnythingOfType("store.QueryOption")). + Return(nil, store.ErrNoDocuments). + Once() + }, + expected: NewErrAPIKeyNotFound("nonexistent", store.ErrNoDocuments), + }, + { + description: "fails when a conflict is found", + req: &requests.UpdateAPIKey{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + CurrentName: "dev", + Name: "newName", + Role: "administrator", + }, + requiredMocks: func(ctx context.Context) { + existingAPIKey := &models.APIKey{ + ID: "existing-id", + Name: "dev", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "operator", + } + + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{Members: []models.Member{{ID: "000000000000000000000000", Role: "owner"}}}, nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-4000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("APIKeyResolve", ctx, store.APIKeyNameResolver, "dev", mock.AnythingOfType("store.QueryOption")). + Return(existingAPIKey, nil). 
+ Once() + storeMock. + On("APIKeyConflicts", ctx, "00000000-0000-4000-0000-000000000000", &models.APIKeyConflicts{Name: "newName"}). + Return([]string{"name"}, true, nil). + Once() + }, + expected: NewErrAPIKeyDuplicated([]string{"name"}), + }, + { + description: "fails when api key save fails", + req: &requests.UpdateAPIKey{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + CurrentName: "dev", + Name: "newName", + Role: "administrator", + }, + requiredMocks: func(ctx context.Context) { + existingAPIKey := &models.APIKey{ + ID: "existing-id", + Name: "dev", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "operator", + } + + updatedAPIKey := &models.APIKey{ + ID: "existing-id", + Name: "newName", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "administrator", + } + + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{Members: []models.Member{{ID: "000000000000000000000000", Role: "owner"}}}, nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-4000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("APIKeyResolve", ctx, store.APIKeyNameResolver, "dev", mock.AnythingOfType("store.QueryOption")). + Return(existingAPIKey, nil). + Once() + storeMock. + On("APIKeyConflicts", ctx, "00000000-0000-4000-0000-000000000000", &models.APIKeyConflicts{Name: "newName"}). + Return([]string{}, false, nil). + Once() + storeMock. + On("APIKeyUpdate", ctx, updatedAPIKey). + Return(errors.New("save error")). 
+ Once() + }, + expected: errors.New("save error"), + }, + { + description: "succeeds", + req: &requests.UpdateAPIKey{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + CurrentName: "dev", + Name: "newName", + Role: "administrator", + }, + requiredMocks: func(ctx context.Context) { + existingAPIKey := &models.APIKey{ + ID: "existing-id", + Name: "dev", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "operator", + } + + updatedAPIKey := &models.APIKey{ + ID: "existing-id", + Name: "newName", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "administrator", + } + + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{Members: []models.Member{{ID: "000000000000000000000000", Role: "owner"}}}, nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-4000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("APIKeyResolve", ctx, store.APIKeyNameResolver, "dev", mock.AnythingOfType("store.QueryOption")). + Return(existingAPIKey, nil). + Once() + storeMock. + On("APIKeyConflicts", ctx, "00000000-0000-4000-0000-000000000000", &models.APIKeyConflicts{Name: "newName"}). + Return([]string{}, false, nil). + Once() + storeMock. + On("APIKeyUpdate", ctx, updatedAPIKey). + Return(nil). 
+ Once()
+ },
+ expected: nil,
+ },
+ {
+ description: "succeeds without updating the name",
+ req: &requests.UpdateAPIKey{
+ UserID: "000000000000000000000000",
+ TenantID: "00000000-0000-4000-0000-000000000000",
+ CurrentName: "dev",
+ Name: "dev", // same name
+ Role: "administrator",
+ },
+ requiredMocks: func(ctx context.Context) {
+ existingAPIKey := &models.APIKey{
+ ID: "existing-id",
+ Name: "dev",
+ TenantID: "00000000-0000-4000-0000-000000000000",
+ Role: "operator",
+ }
+
+ updatedAPIKey := &models.APIKey{
+ ID: "existing-id",
+ Name: "dev",
+ TenantID: "00000000-0000-4000-0000-000000000000",
+ Role: "administrator",
+ }
+
+ storeMock.
+ On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000").
+ Return(&models.Namespace{Members: []models.Member{{ID: "000000000000000000000000", Role: "owner"}}}, nil).
+ Once()
+ queryOptionsMock.
+ On("InNamespace", "00000000-0000-4000-0000-000000000000").
+ Return(nil).
+ Once()
+ storeMock.
+ On("APIKeyResolve", ctx, store.APIKeyNameResolver, "dev", mock.AnythingOfType("store.QueryOption")).
+ Return(existingAPIKey, nil).
+ Once()
+ storeMock.
+ On("APIKeyUpdate", ctx, updatedAPIKey).
+ Return(nil). 
+ Once() + }, + expected: nil, + }, + } + + privateKey, err := rsa.GenerateKey(rand.Reader, 2048) + require.NoError(t, err) + + s := NewService(storeMock, privateKey, &privateKey.PublicKey, storecache.NewNullCache(), clientMock) + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + ctx := context.Background() + tc.requiredMocks(ctx) + + err := s.UpdateAPIKey(ctx, tc.req) + require.Equal(t, tc.expected, err) + }) + } + + storeMock.AssertExpectations(t) +} + +func TestDeleteAPIKey(t *testing.T) { + storeMock := new(storemock.Store) + queryOptionsMock := new(storemock.QueryOptions) + storeMock.On("Options").Return(queryOptionsMock) + + cases := []struct { + description string + req *requests.DeleteAPIKey + requiredMocks func(context.Context) + expected error + }{ + { + description: "fails when api key does not exist for resolve", + req: &requests.DeleteAPIKey{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "nonexistent", + }, + requiredMocks: func(ctx context.Context) { + queryOptionsMock. + On("InNamespace", "00000000-0000-4000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("APIKeyResolve", ctx, store.APIKeyNameResolver, "nonexistent", mock.AnythingOfType("store.QueryOption")). + Return(nil, store.ErrNoDocuments). + Once() + }, + expected: NewErrAPIKeyNotFound("nonexistent", store.ErrNoDocuments), + }, + { + description: "fails when api key delete fails", + req: &requests.DeleteAPIKey{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "dev", + }, + requiredMocks: func(ctx context.Context) { + existingAPIKey := &models.APIKey{ + ID: "existing-id", + Name: "dev", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "operator", + } + + queryOptionsMock. + On("InNamespace", "00000000-0000-4000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("APIKeyResolve", ctx, store.APIKeyNameResolver, "dev", mock.AnythingOfType("store.QueryOption")). + Return(existingAPIKey, nil). 
+ Once() + storeMock. + On("APIKeyDelete", ctx, existingAPIKey). + Return(errors.New("delete error")). + Once() + }, + expected: errors.New("delete error"), + }, + { + description: "succeeds", + req: &requests.DeleteAPIKey{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "dev", + }, + requiredMocks: func(ctx context.Context) { + existingAPIKey := &models.APIKey{ + ID: "existing-id", + Name: "dev", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "operator", + } + + queryOptionsMock. + On("InNamespace", "00000000-0000-4000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("APIKeyResolve", ctx, store.APIKeyNameResolver, "dev", mock.AnythingOfType("store.QueryOption")). + Return(existingAPIKey, nil). + Once() + storeMock. + On("APIKeyDelete", ctx, existingAPIKey). + Return(nil). + Once() + }, + expected: nil, + }, + } + + privateKey, err := rsa.GenerateKey(rand.Reader, 2048) + require.NoError(t, err) + + s := NewService(storeMock, privateKey, &privateKey.PublicKey, storecache.NewNullCache(), clientMock) + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + ctx := context.Background() + tc.requiredMocks(ctx) + + err := s.DeleteAPIKey(ctx, tc.req) + require.Equal(t, tc.expected, err) + }) + } + + storeMock.AssertExpectations(t) +} diff --git a/api/services/auth.go b/api/services/auth.go index 094f9c7ac62..f32ea800a64 100644 --- a/api/services/auth.go +++ b/api/services/auth.go @@ -10,279 +10,474 @@ import ( "encoding/base64" "encoding/hex" "encoding/pem" + "net" + "slices" "strings" "time" "github.com/cnf/structhash" - "github.com/golang-jwt/jwt/v4" + "github.com/shellhub-io/shellhub/api/store" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" "github.com/shellhub-io/shellhub/pkg/api/jwttoken" "github.com/shellhub-io/shellhub/pkg/api/requests" "github.com/shellhub-io/shellhub/pkg/clock" "github.com/shellhub-io/shellhub/pkg/models" + "github.com/shellhub-io/shellhub/pkg/uuid" + log 
"github.com/sirupsen/logrus" ) type AuthService interface { AuthCacheToken(ctx context.Context, tenant, id, token string) error AuthIsCacheToken(ctx context.Context, tenant, id string) (bool, error) AuthUncacheToken(ctx context.Context, tenant, id string) error - AuthDevice(ctx context.Context, req requests.DeviceAuth, remoteAddr string) (*models.DeviceAuthResponse, error) - AuthUser(ctx context.Context, model *models.UserAuthRequest) (*models.UserAuthResponse, error) - AuthGetToken(ctx context.Context, id string, mfa bool) (*models.UserAuthResponse, error) + + // AuthDevice authenticates a device, creating it if it doesn't exist. Returns a JWT token and device metadata for successful authentication. + // It also updates session timestamps for backward compatibility with older agents. + AuthDevice(ctx context.Context, req requests.DeviceAuth) (*models.DeviceAuthResponse, error) + // AuthLocalUser attempts to authenticate a user with origin [github.com/shellhub-io/shellhub/pkg/models.UserOriginLocal] + // using the provided credentials. Users can be blocked from authentication when they make 3 password mistakes or when + // they have MFA enabled (which is a cloud-only feature). + // + // It will try to use the user's preferred namespace or the first one to which the user was added. As the + // authentication key is a JWT, in these cases, the response does not contain the member role to avoid creating + // a stateful token. The role must be added in the auth middleware. The TenantID in the response will be empty if the user + // is not a member of any namespace or if the user's membership status is pending. 
+ // + // It returns a timestamp when the block ends if the user is locked out, a token to be used with the OTP code if the MFA + // is enabled and an error, if any + AuthLocalUser(ctx context.Context, req *requests.AuthLocalUser, sourceIP string) (res *models.UserAuthResponse, lockout int64, mfaToken string, err error) + // CreateUserToken is similar to [AuthService.AuthUser] but bypasses credential verification and never blocks. + // + // It accepts an optional tenant ID to associate the token with a namespace. If the tenant ID is empty, it uses the user's + // preferred namespace or the first namespace to which the user was added; if the user's membership status is pending, it + // returns an NamespaceNotFound error. + // + // It returns the created token and an error if any. + CreateUserToken(ctx context.Context, req *requests.CreateUserToken) (res *models.UserAuthResponse, err error) + // GetUserRole get the user's role. It returns the user's role and an error, if any. + GetUserRole(ctx context.Context, tenantID, userID string) (role string, err error) + // AuthAPIKey authenticates the given key, returning its API key document. An API key can be used + // in place of a JWT token to authenticate requests. The key is only related to a namespace and not to a user, + // which means that some routes are blocked from authentication within this method. An API key can be expired, + // rendering it invalid. It returns the API key and an error if any. + // + // The key is cached for 2 minutes after use, so requests made within this period will treat the key as valid. 
+ AuthAPIKey(ctx context.Context, key string) (apiKey *models.APIKey, err error) + AuthPublicKey(ctx context.Context, req requests.PublicKeyAuth) (*models.PublicKeyAuthResponse, error) - AuthSwapToken(ctx context.Context, ID, tenant string) (*models.UserAuthResponse, error) - AuthUserInfo(ctx context.Context, username, tenant, token string) (*models.UserAuthResponse, error) - AuthMFA(ctx context.Context, id string) (bool, error) PublicKey() *rsa.PublicKey } -func (s *service) AuthDevice(ctx context.Context, req requests.DeviceAuth, remoteAddr string) (*models.DeviceAuthResponse, error) { - var identity *models.DeviceIdentity - if req.Identity != nil { - identity = &models.DeviceIdentity{ - MAC: req.Identity.MAC, +func (s *service) AuthDevice(ctx context.Context, req requests.DeviceAuth) (*models.DeviceAuthResponse, error) { + namespace, err := s.store.NamespaceResolve(ctx, store.NamespaceTenantIDResolver, req.TenantID) + if err != nil { + return nil, NewErrNamespaceNotFound(req.TenantID, err) + } + + if req.Identity == nil { + return nil, NewErrAuthDeviceNoIdentity() + } + + hostname := req.Hostname + if hostname == "" { + if req.Identity.MAC != "" { + hostname = strings.ReplaceAll(req.Identity.MAC, ":", "-") + } else { + return nil, NewErrAuthDeviceNoIdentityAndHostname() } } + auth := models.DeviceAuth{ - Hostname: req.Hostname, - Identity: identity, + Hostname: strings.ToLower(hostname), + Identity: &models.DeviceIdentity{MAC: req.Identity.MAC}, PublicKey: req.PublicKey, TenantID: req.TenantID, } - uid := sha256.Sum256(structhash.Dump(auth, 1)) + uidSHA := sha256.Sum256(structhash.Dump(auth, 1)) + uid := hex.EncodeToString(uidSHA[:]) - key := hex.EncodeToString(uid[:]) - - token, err := jwttoken.New(). - WithMethod(jwt.SigningMethodRS256). - WithClaims(&models.DeviceAuthClaims{ - UID: key, - AuthClaims: models.AuthClaims{ - Claims: "device", - }, - }). - WithPrivateKey(s.privKey). 
- Sign() + token, err := jwttoken.EncodeDeviceClaims(authorizer.DeviceClaims{UID: uid, TenantID: req.TenantID}, s.privKey) if err != nil { return nil, NewErrTokenSigned(err) } - type Device struct { - Name string - Namespace string + cachedData := make(map[string]string) + if err := s.cache.Get(ctx, "auth_device/"+uid, &cachedData); err == nil && cachedData["device_name"] != "" { + resp := &models.DeviceAuthResponse{ + UID: uid, + Token: token, + Name: cachedData["device_name"], + Namespace: cachedData["namespace_name"], + } + + return resp, nil } - var value *Device + device, err := s.store.DeviceResolve(ctx, store.DeviceUIDResolver, uid) + if err != nil { + if err != store.ErrNoDocuments { + return nil, err + } + + position, err := s.locator.GetPosition(net.ParseIP(req.RealIP)) + if err != nil { + return nil, err + } + + device = &models.Device{ + CreatedAt: clock.Now(), + UID: uid, + TenantID: req.TenantID, + LastSeen: clock.Now(), + DisconnectedAt: nil, + Status: models.DeviceStatusPending, + StatusUpdatedAt: clock.Now(), + Name: strings.ToLower(hostname), + Identity: &models.DeviceIdentity{MAC: req.Identity.MAC}, + PublicKey: req.PublicKey, + RemoteAddr: req.RealIP, + Taggable: models.Taggable{TagIDs: []string{}, Tags: nil}, + Position: &models.DevicePosition{Longitude: position.Longitude, Latitude: position.Latitude}, + } + + if req.Info != nil { + device.Info = &models.DeviceInfo{ + ID: req.Info.ID, + PrettyName: req.Info.PrettyName, + Version: req.Info.Version, + Arch: req.Info.Arch, + Platform: req.Info.Platform, + } + } + + if _, err := s.store.DeviceCreate(ctx, device); err != nil { + return nil, NewErrDeviceCreate(models.Device{}, err) + } + + if err := s.store.NamespaceIncrementDeviceCount(ctx, req.TenantID, device.Status, 1); err != nil { + return nil, err + } + } else { + device.LastSeen = clock.Now() + device.DisconnectedAt = nil + + if device.RemovedAt != nil { + device.Status = models.DeviceStatusPending + if err := 
s.store.NamespaceIncrementDeviceCount(ctx, req.TenantID, models.DeviceStatusPending, 1); err != nil { + return nil, err + } + } + + if req.Info != nil { + device.Info = &models.DeviceInfo{ + ID: req.Info.ID, + PrettyName: req.Info.PrettyName, + Version: req.Info.Version, + Arch: req.Info.Arch, + Platform: req.Info.Platform, + } + } + + if err := s.store.DeviceUpdate(ctx, device); err != nil { + log.WithError(err).Error("failed to updated device to online") - if err := s.cache.Get(ctx, strings.Join([]string{"auth_device", key}, "/"), &value); err == nil && value != nil { - return &models.DeviceAuthResponse{ - UID: key, - Token: token.String(), - Name: value.Name, - Namespace: value.Namespace, - }, nil + return nil, err + } } - var info *models.DeviceInfo - if req.Info != nil { - info = &models.DeviceInfo{ - ID: req.Info.ID, - PrettyName: req.Info.PrettyName, - Version: req.Info.Version, - Arch: req.Info.Arch, - Platform: req.Info.Platform, + + for _, sessionUID := range req.Sessions { + session, err := s.store.SessionResolve(ctx, store.SessionUIDResolver, sessionUID) + if err != nil { + log.WithError(err).WithField("session_uid", sessionUID).Warn("cannot resolve session") + + continue + } + + if session.Closed { + continue + } + + session.LastSeen = clock.Now() + if err := s.store.SessionUpdate(ctx, session); err != nil { + log.WithError(err).WithField("session_uid", sessionUID).Warn("cannot set session's last seen") + + continue + } + + activeSession, err := s.store.ActiveSessionResolve(ctx, store.SessionUIDResolver, sessionUID) + if err != nil { + log.WithError(err).WithField("session_uid", sessionUID).Warn("cannot resolve active session") + + continue + } + + activeSession.LastSeen = session.LastSeen + if err := s.store.ActiveSessionUpdate(ctx, activeSession); err != nil { + log.WithError(err).WithField("session_uid", sessionUID).Warn("cannot update active session's last seen") } } - device := models.Device{ - UID: key, - Identity: identity, - Info: info, - 
PublicKey: req.PublicKey, - TenantID: req.TenantID, - LastSeen: clock.Now(), - RemoteAddr: remoteAddr, + + cachedData["device_name"] = device.Name + cachedData["namespace_name"] = namespace.Name + if err := s.cache.Set(ctx, "auth_device/"+uid, cachedData, time.Second*30); err != nil { + log.WithError(err).Warn("cannot store device authentication metadata in cache") } - // The order here is critical as we don't want to register devices if the tenant id is invalid - namespace, err := s.store.NamespaceGet(ctx, device.TenantID) - if err != nil { - return nil, NewErrNamespaceNotFound(device.TenantID, err) + resp := &models.DeviceAuthResponse{ + UID: uid, + Token: token, + Name: cachedData["device_name"], + Namespace: cachedData["namespace_name"], } - hostname := strings.ToLower(req.Hostname) + return resp, nil +} - if err := s.store.DeviceCreate(ctx, device, hostname); err != nil { - return nil, NewErrDeviceCreate(device, err) +func (s *service) AuthLocalUser(ctx context.Context, req *requests.AuthLocalUser, sourceIP string) (*models.UserAuthResponse, int64, string, error) { + if s, err := s.store.SystemGet(ctx); err != nil || !s.Authentication.Local.Enabled { + return nil, 0, "", NewErrAuthMethodNotAllowed(models.UserAuthMethodLocal.String()) } - for _, uid := range req.Sessions { - if err := s.store.SessionSetLastSeen(ctx, models.UID(uid)); err != nil { - continue - } + resolver := store.UserUsernameResolver + if req.Identifier.IsEmail() { + resolver = store.UserEmailResolver } - dev, err := s.store.DeviceGetByUID(ctx, models.UID(device.UID), device.TenantID) + user, err := s.store.UserResolve(ctx, resolver, strings.ToLower(string(req.Identifier))) if err != nil { - return nil, NewErrDeviceNotFound(models.UID(device.UID), err) + return nil, 0, "", NewErrAuthUnathorized(nil) } - if err := s.cache.Set(ctx, strings.Join([]string{"auth_device", key}, "/"), &Device{Name: dev.Name, Namespace: namespace.Name}, time.Second*30); err != nil { - return nil, err + + if 
!slices.Contains(user.Preferences.AuthMethods, models.UserAuthMethodLocal) { + return nil, 0, "", NewErrAuthUnathorized(nil) } - return &models.DeviceAuthResponse{ - UID: key, - Token: token.String(), - Name: dev.Name, - Namespace: namespace.Name, - }, nil -} + switch user.Status { + case models.UserStatusNotConfirmed: + return nil, 0, "", NewErrUserNotConfirmed(nil) + default: + break + } -func (s *service) AuthUser(ctx context.Context, model *models.UserAuthRequest) (*models.UserAuthResponse, error) { - var err error - var user *models.User + // Checks whether the user is currently blocked from new login attempts + if lockout, attempt, _ := s.cache.HasAccountLockout(ctx, sourceIP, user.ID); lockout > 0 { + log. + WithFields(log.Fields{ + "lockout": lockout, + "attempt": attempt, + "source_ip": sourceIP, + "user_id": user.ID, + }). + Warn("attempt to login blocked") - if model.Identifier.IsEmail() { - user, err = s.store.UserGetByEmail(ctx, strings.ToLower(string(model.Identifier))) - } else { - user, err = s.store.UserGetByUsername(ctx, strings.ToLower(string(model.Identifier))) + return nil, lockout, "", NewErrAuthUnathorized(nil) } - if err != nil { - return nil, NewErrAuthUnathorized(nil) + if !user.Password.Compare(req.Password) { + lockout, _, err := s.cache.StoreLoginAttempt(ctx, sourceIP, user.ID) + if err != nil { + log.WithError(err). + WithField("source_ip", sourceIP). + WithField("user_id", user.ID). + Warn("unable to store login attempt") + } + + return nil, lockout, "", NewErrAuthUnathorized(nil) } - if !user.Confirmed { - return nil, NewErrUserNotConfirmed(nil) + // Reset the attempt and timeout values when succeeds + if err := s.cache.ResetLoginAttempts(ctx, sourceIP, user.ID); err != nil { + log.WithError(err). + WithField("source_ip", sourceIP). + WithField("user_id", user.ID). 
+ Warn("unable to reset authentication attempts") } - namespace, _ := s.store.NamespaceGetFirst(ctx, user.ID) + // Users with MFA enabled must authenticate to the cloud instead of community. + if user.MFA.Enabled { + mfaToken := uuid.Generate() + if err := s.cache.Set(ctx, "mfa-token={"+mfaToken+"}", user.ID, 30*time.Minute); err != nil { + log.WithError(err). + WithField("source_ip", sourceIP). + WithField("user_id", user.ID). + Warn("unable to store mfa-token") + } - var role string - var tenant string + return nil, 0, mfaToken, nil + } - if namespace != nil { - tenant = namespace.TenantID - if member, _ := namespace.FindMember(user.ID); member != nil { - role = member.Role + tenantID := "" + role := "" + if ns, _ := s.store.NamespaceGetPreferred(ctx, user.ID); ns != nil && ns.TenantID != "" { + if m, _ := ns.FindMember(user.ID); m != nil { + tenantID = ns.TenantID + role = m.Role.String() } } - if !user.UserPassword.Compare(models.NewUserPassword(model.Password)) { - return nil, NewErrAuthUnathorized(nil) + claims := authorizer.UserClaims{ + ID: user.ID, + Origin: user.Origin.String(), + TenantID: tenantID, + Username: user.Username, + MFA: user.MFA.Enabled, + Admin: user.Admin, } - status, err := s.AuthMFA(ctx, user.ID) + token, err := jwttoken.EncodeUserClaims(claims, s.privKey) if err != nil { - return nil, NewErrUserNotFound(user.ID, err) - } - - token, err := jwttoken.New(). - WithMethod(jwt.SigningMethodRS256). - WithExpire(clock.Now().Add(time.Hour * 72)). - WithClaims(&models.UserAuthClaims{ - ID: user.ID, - Tenant: tenant, - Role: role, - Admin: true, - Username: user.Username, - MFA: models.MFA{ - Enable: status, - Validate: false, - }, - AuthClaims: models.AuthClaims{ - Claims: "user", - }, - }). - WithPrivateKey(s.privKey). 
- Sign() - if err != nil { - return nil, NewErrTokenSigned(err) + return nil, 0, "", NewErrTokenSigned(err) } + // Updates last_login and the hash algorithm to bcrypt if still using SHA256 user.LastLogin = clock.Now() + user.Preferences.PreferredNamespace = tenantID + if !strings.HasPrefix(user.Password.Hash, "$") { + if neo, _ := models.HashUserPassword(req.Password); neo.Hash != "" { + user.Password = neo + } + } - if err := s.store.UserUpdateData(ctx, user.ID, *user); err != nil { - return nil, NewErrUserUpdate(user, err) + // TODO: evaluate make this update in a go routine. + if err := s.store.UserUpdate(ctx, user); err != nil { + return nil, 0, "", NewErrUserUpdate(user, err) } - s.AuthCacheToken(ctx, tenant, user.ID, token.String()) // nolint: errcheck + if err := s.AuthCacheToken(ctx, tenantID, user.ID, token); err != nil { + log.WithError(err). + WithFields(log.Fields{"id": user.ID}). + Warn("unable to cache the authentication token") + } - return &models.UserAuthResponse{ - Token: token.String(), - Name: user.Name, - ID: user.ID, - User: user.Username, - Tenant: tenant, - Role: role, - Email: user.Email, - MFA: models.MFA{ - Enable: status, - Validate: false, - }, - }, nil + res := &models.UserAuthResponse{ + ID: user.ID, + Origin: user.Origin.String(), + AuthMethods: user.Preferences.AuthMethods, + User: user.Username, + Name: user.Name, + Email: user.Email, + RecoveryEmail: user.RecoveryEmail, + MFA: user.MFA.Enabled, + Tenant: tenantID, + Role: role, + Token: token, + MaxNamespaces: user.MaxNamespaces, + Admin: user.Admin, + } + + return res, 0, "", nil } -func (s *service) AuthGetToken(ctx context.Context, id string, mfa bool) (*models.UserAuthResponse, error) { - user, _, err := s.store.UserGetByID(ctx, id, false) +func (s *service) CreateUserToken(ctx context.Context, req *requests.CreateUserToken) (*models.UserAuthResponse, error) { + user, err := s.store.UserResolve(ctx, store.UserIDResolver, req.UserID) if err != nil { - return nil, 
NewErrUserNotFound(id, err) + return nil, NewErrUserNotFound(req.UserID, err) } - namespace, _ := s.store.NamespaceGetFirst(ctx, user.ID) + tenantID := "" + role := "" + + switch req.TenantID { + case "": + // A user may not have a preferred namespace. In such cases, we create a token without it. + namespace, err := s.store.NamespaceGetPreferred(ctx, user.ID) + if err != nil { + break + } + + member, ok := namespace.FindMember(user.ID) + if !ok { + return nil, NewErrNamespaceMemberNotFound(user.ID, nil) + } - var role string - var tenant string - if namespace != nil { - tenant = namespace.TenantID - if member, ok := namespace.FindMember(user.ID); ok { - role = member.Role + tenantID = namespace.TenantID + role = member.Role.String() + default: + namespace, err := s.store.NamespaceResolve(ctx, store.NamespaceTenantIDResolver, req.TenantID) + if err != nil { + return nil, NewErrNamespaceNotFound(req.TenantID, err) + } + + member, ok := namespace.FindMember(user.ID) + if !ok { + return nil, NewErrNamespaceMemberNotFound(user.ID, nil) + } + + tenantID = namespace.TenantID + role = member.Role.String() + + if user.Preferences.PreferredNamespace != namespace.TenantID { + user.Preferences.PreferredNamespace = tenantID + // TODO: evaluate make this update in a go routine. + if err := s.store.UserUpdate(ctx, user); err != nil { + return nil, NewErrUserUpdate(user, err) + } } } - status, err := s.AuthMFA(ctx, user.ID) - if err != nil { - return nil, NewErrUserNotFound(id, err) - } - - token, err := jwttoken.New(). - WithMethod(jwt.SigningMethodRS256). - WithExpire(clock.Now().Add(time.Hour * 72)). - WithClaims(&models.UserAuthClaims{ - ID: user.ID, - Tenant: tenant, - Role: role, - Admin: true, - Username: user.Username, - MFA: models.MFA{ - Enable: status, - Validate: mfa, - }, - AuthClaims: models.AuthClaims{ - Claims: "user", - }, - }). - WithPrivateKey(s.privKey). 
- Sign() + claims := authorizer.UserClaims{ + ID: user.ID, + Origin: user.Origin.String(), + TenantID: tenantID, + Username: user.Username, + MFA: user.MFA.Enabled, + Admin: user.Admin, + } + + token, err := jwttoken.EncodeUserClaims(claims, s.privKey) if err != nil { return nil, NewErrTokenSigned(err) } - s.AuthCacheToken(ctx, tenant, user.ID, token.String()) // nolint: errcheck + if err := s.AuthCacheToken(ctx, tenantID, user.ID, token); err != nil { + log.WithError(err).Warn("unable to cache the user's auth token") + } return &models.UserAuthResponse{ - Token: token.String(), - Name: user.Name, - ID: user.ID, - User: user.Username, - Tenant: tenant, - Role: role, - Email: user.Email, - MFA: models.MFA{ - Enable: status, - Validate: mfa, - }, + ID: user.ID, + Origin: user.Origin.String(), + AuthMethods: user.Preferences.AuthMethods, + User: user.Username, + Name: user.Name, + Email: user.Email, + RecoveryEmail: user.RecoveryEmail, + MFA: user.MFA.Enabled, + Tenant: tenantID, + Role: role, + Token: token, + MaxNamespaces: user.MaxNamespaces, + Admin: user.Admin, }, nil } +func (s *service) AuthAPIKey(ctx context.Context, key string) (*models.APIKey, error) { + apiKey := new(models.APIKey) + if err := s.cache.Get(ctx, "api-key={"+key+"}", apiKey); err != nil { + return nil, err + } + + if apiKey.ID == "" { + keySum := sha256.Sum256([]byte(key)) + hashedKey := hex.EncodeToString(keySum[:]) + + var err error + if apiKey, err = s.store.APIKeyResolve(ctx, store.APIKeyIDResolver, hashedKey); err != nil { + return nil, NewErrAPIKeyNotFound("", err) + } + } + + if !apiKey.IsValid() { + return nil, NewErrAPIKeyInvalid(apiKey.Name) + } + + if err := s.cache.Set(ctx, "api-key={"+key+"}", apiKey, 2*time.Minute); err != nil { + log.WithError(err).Info("Unable to set the api-key in cache") + } + + return apiKey, nil +} + func (s *service) AuthPublicKey(ctx context.Context, req requests.PublicKeyAuth) (*models.PublicKeyAuthResponse, error) { privKey, err := 
s.store.PrivateKeyGet(ctx, req.Fingerprint) if err != nil { @@ -310,89 +505,18 @@ func (s *service) AuthPublicKey(ctx context.Context, req requests.PublicKeyAuth) }, nil } -func (s *service) AuthSwapToken(ctx context.Context, id, tenant string) (*models.UserAuthResponse, error) { - namespace, err := s.store.NamespaceGet(ctx, tenant) +func (s *service) GetUserRole(ctx context.Context, tenantID, userID string) (string, error) { + ns, err := s.store.NamespaceResolve(ctx, store.NamespaceTenantIDResolver, tenantID) if err != nil { - return nil, NewErrNamespaceNotFound(tenant, err) + return "", err } - user, _, err := s.store.UserGetByID(ctx, id, false) - if err != nil { - return nil, NewErrUserNotFound(id, err) - } - - for _, member := range namespace.Members { - if user.ID == member.ID { - token, err := jwttoken.New(). - WithMethod(jwt.SigningMethodRS256). - WithExpire(clock.Now().Add(time.Hour * 72)). - WithClaims(&models.UserAuthClaims{ - ID: user.ID, - Tenant: tenant, - Role: member.Role, - Admin: true, - Username: user.Username, - AuthClaims: models.AuthClaims{ - Claims: "user", - }, - }). - WithPrivateKey(s.privKey). 
- Sign() - if err != nil { - return nil, NewErrTokenSigned(err) - } - - s.AuthCacheToken(ctx, tenant, user.ID, token.String()) // nolint: errcheck - - return &models.UserAuthResponse{ - Token: token.String(), - Name: user.Name, - ID: user.ID, - User: user.Username, - Role: member.Role, - Tenant: namespace.TenantID, - Email: user.Email, - }, nil - } + member, ok := ns.FindMember(userID) + if !ok { + return "", NewErrNamespaceMemberNotFound(userID, nil) } - return nil, nil -} - -func (s *service) AuthUserInfo(ctx context.Context, username, tenant, token string) (*models.UserAuthResponse, error) { - user, err := s.store.UserGetByUsername(ctx, username) - if err != nil || user == nil { - return nil, NewErrUserNotFound(username, err) - } - - namespace, _ := s.store.NamespaceGet(ctx, tenant) - - var role string - if namespace != nil { - if member, _ := namespace.FindMember(user.ID); member != nil { - role = member.Role - } - } - - token = strings.Replace(token, "Bearer ", "", 1) - - status, err := s.AuthMFA(ctx, user.ID) - if err != nil { - return nil, NewErrUserNotFound(user.ID, err) - } - - return &models.UserAuthResponse{ - Token: token, - Name: user.Name, - User: user.Username, - Tenant: tenant, - Role: role, - ID: user.ID, - Email: user.Email, - MFA: models.MFA{ - Enable: status, - }, - }, nil + return member.Role.String(), nil } func (s *service) PublicKey() *rsa.PublicKey { @@ -433,12 +557,3 @@ func (s *service) AuthIsCacheToken(ctx context.Context, tenant, id string) (bool func (s *service) AuthUncacheToken(ctx context.Context, tenant, id string) error { return s.cache.Delete(ctx, "token_"+tenant+id) } - -func (s *service) AuthMFA(ctx context.Context, id string) (bool, error) { - status, err := s.store.GetStatusMFA(ctx, id) - if err != nil { - return false, err - } - - return status, nil -} diff --git a/api/services/auth_test.go b/api/services/auth_test.go index 40329a27ada..8f818ec4f66 100644 --- a/api/services/auth_test.go +++ b/api/services/auth_test.go @@ 
-6,334 +6,2603 @@ import ( "crypto/rsa" "crypto/sha256" "encoding/hex" + "strings" "testing" "time" "github.com/cnf/structhash" "github.com/shellhub-io/shellhub/api/store" "github.com/shellhub-io/shellhub/api/store/mocks" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" + "github.com/shellhub-io/shellhub/pkg/api/jwttoken" "github.com/shellhub-io/shellhub/pkg/api/requests" - storecache "github.com/shellhub-io/shellhub/pkg/cache" + mockcache "github.com/shellhub-io/shellhub/pkg/cache/mocks" + "github.com/shellhub-io/shellhub/pkg/clock" + clockmock "github.com/shellhub-io/shellhub/pkg/clock/mocks" "github.com/shellhub-io/shellhub/pkg/errors" "github.com/shellhub-io/shellhub/pkg/models" + "github.com/shellhub-io/shellhub/pkg/uuid" + uuidmock "github.com/shellhub-io/shellhub/pkg/uuid/mocks" "github.com/stretchr/testify/assert" - "github.com/undefinedlabs/go-mpatch" + testifymock "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" ) func TestAuthDevice(t *testing.T) { - mock := new(mocks.Store) + storeMock := new(mocks.Store) + cacheMock := new(mockcache.Cache) + clockMock := new(clockmock.Clock) + uuidMock := new(uuidmock.Uuid) - ctx := context.TODO() + now := time.Now() + clock.DefaultBackend = clockMock + clockMock.On("Now").Return(now) + uuid.DefaultBackend = uuidMock + uuidMock.On("Generate").Return("00000000-0000-0000-0000-000000000000") - authReq := requests.DeviceAuth{ - TenantID: "tenant", - Identity: &requests.DeviceIdentity{ - MAC: "mac", - }, - Sessions: []string{"session"}, + toUID := func(tenantID, hostname, mac, publicKey string) string { + auth := models.DeviceAuth{ + Hostname: strings.ToLower(hostname), + Identity: &models.DeviceIdentity{MAC: mac}, + PublicKey: publicKey, + TenantID: tenantID, + } + + uidSHA := sha256.Sum256(structhash.Dump(auth, 1)) + + return hex.EncodeToString(uidSHA[:]) } - auth := models.DeviceAuth{ - Hostname: authReq.Hostname, - Identity: &models.DeviceIdentity{ - MAC: authReq.Identity.MAC, - }, - 
PublicKey: authReq.PublicKey, - TenantID: authReq.TenantID, + toToken := func(tenantID, uid string) string { + token, err := jwttoken.EncodeDeviceClaims(authorizer.DeviceClaims{UID: uid, TenantID: tenantID}, privateKey) + require.NoError(t, err) + + return token } - uid := sha256.Sum256(structhash.Dump(auth, 1)) - device := &models.Device{ - UID: hex.EncodeToString(uid[:]), - Identity: &models.DeviceIdentity{ - MAC: authReq.Identity.MAC, - }, - TenantID: authReq.TenantID, - LastSeen: now, - RemoteAddr: "0.0.0.0", + + type Expected struct { + res *models.DeviceAuthResponse + err error } - clockMock.On("Now").Return(now).Twice() - namespace := &models.Namespace{Name: "group1", Owner: "hash1", TenantID: "tenant"} + cases := []struct { + description string + req requests.DeviceAuth + requiredMocks func(context.Context) + expected Expected + }{ + { + description: "fails when tenant does not exist", + req: requests.DeviceAuth{ + TenantID: "00000000-0000-4000-0000-000000000000", + RealIP: "127.0.0.1", + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(nil, errors.New("error", "store", 0)). + Once() + }, + expected: Expected{ + res: nil, + err: NewErrNamespaceNotFound("00000000-0000-4000-0000-000000000000", errors.New("error", "store", 0)), + }, + }, + { + description: "fails to authenticate device due to no identity", + req: requests.DeviceAuth{ + TenantID: "00000000-0000-4000-0000-000000000000", + Hostname: "", + Identity: nil, + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", Name: "test"}, nil). 
+ Once() + }, + expected: Expected{ + res: nil, + err: NewErrAuthDeviceNoIdentity(), + }, + }, + { + description: "fails to authenticate device due to no identity and hostname", + req: requests.DeviceAuth{ + TenantID: "00000000-0000-4000-0000-000000000000", + Hostname: "", + Identity: &requests.DeviceIdentity{MAC: ""}, + RealIP: "127.0.0.1", + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", Name: "test"}, nil). + Once() + }, + expected: Expected{ + res: nil, + err: NewErrAuthDeviceNoIdentityAndHostname(), + }, + }, + { + description: "fails to resolve the device without ErrNoDocuments error", + req: requests.DeviceAuth{ + TenantID: "00000000-0000-4000-0000-000000000000", + Hostname: "hostname", + Identity: &requests.DeviceIdentity{MAC: ""}, + Info: nil, + PublicKey: "", + }, + requiredMocks: func(ctx context.Context) { + uid := toUID("00000000-0000-4000-0000-000000000000", "hostname", "", "") - mock.On("DeviceCreate", ctx, *device, ""). - Return(nil).Once() - mock.On("SessionSetLastSeen", ctx, models.UID(authReq.Sessions[0])). - Return(nil).Once() - mock.On("DeviceGetByUID", ctx, models.UID(device.UID), device.TenantID). - Return(device, nil).Once() - mock.On("NamespaceGet", ctx, namespace.TenantID). - Return(namespace, nil).Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", Name: "test"}, nil). + Once() + cacheMock. + On("Get", ctx, "auth_device/"+uid, testifymock.Anything). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, uid). + Return(nil, errors.New("error", "store", 0)). 
+ Once() + }, + expected: Expected{ + res: nil, + err: errors.New("error", "store", 0), + }, + }, + { + description: "[device exists] fails when cannot update device with info", + req: requests.DeviceAuth{ + TenantID: "00000000-0000-4000-0000-000000000000", + Hostname: "hostname", + Identity: &requests.DeviceIdentity{MAC: ""}, + Info: &requests.DeviceInfo{ + ID: "test", + PrettyName: "Test", + Version: "v0.20.0", + Arch: "arch64", + Platform: "native", + }, + PublicKey: "", + RealIP: "127.0.0.1", + }, + requiredMocks: func(ctx context.Context) { + uid := toUID("00000000-0000-4000-0000-000000000000", "hostname", "", "") + device := &models.Device{UID: uid, Name: "hostname"} - // Mock time.Now using monkey patch - patch, err := mpatch.PatchMethod(time.Now, func() time.Time { return now }) - assert.NoError(t, err) - defer patch.Unpatch() //nolint:errcheck + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", Name: "test"}, nil). + Once() + cacheMock. + On("Get", ctx, "auth_device/"+uid, testifymock.Anything). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, uid). + Return(device, nil). + Once() - privateKey, err := rsa.GenerateKey(rand.Reader, 2048) - assert.NoError(t, err) + expectedDevice := *device + expectedDevice.LastSeen = now + expectedDevice.DisconnectedAt = nil + expectedDevice.Info = &models.DeviceInfo{ + ID: "test", + PrettyName: "Test", + Version: "v0.20.0", + Arch: "arch64", + Platform: "native", + } - service := NewService(store.Store(mock), privateKey, &privateKey.PublicKey, storecache.NewNullCache(), clientMock, nil) + storeMock. + On("DeviceUpdate", ctx, &expectedDevice). + Return(errors.New("error", "store", 0)). 
+ Once() + }, + expected: Expected{ + res: nil, + err: errors.New("error", "store", 0), + }, + }, + { + description: "[device exists] fails when cannot set device as online", + req: requests.DeviceAuth{ + TenantID: "00000000-0000-4000-0000-000000000000", + Hostname: "hostname", + Identity: &requests.DeviceIdentity{MAC: ""}, + Info: nil, + PublicKey: "", + RealIP: "127.0.0.1", + }, + requiredMocks: func(ctx context.Context) { + uid := toUID("00000000-0000-4000-0000-000000000000", "hostname", "", "") + device := &models.Device{UID: uid, Name: "hostname"} - authRes, err := service.AuthDevice(ctx, authReq, "0.0.0.0") - assert.NoError(t, err) + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", Name: "test"}, nil). + Once() + cacheMock. + On("Get", ctx, "auth_device/"+uid, testifymock.Anything). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, uid). + Return(device, nil). + Once() - assert.Equal(t, device.UID, authRes.UID) - assert.Equal(t, device.Name, authRes.Name) - assert.Equal(t, namespace.Name, authRes.Namespace) - assert.NotEmpty(t, authRes.Token) - assert.Equal(t, device.RemoteAddr, "0.0.0.0") + expectedDevice := *device + expectedDevice.LastSeen = now + expectedDevice.DisconnectedAt = nil - mock.AssertExpectations(t) + storeMock. + On("DeviceUpdate", ctx, &expectedDevice). + Return(errors.New("error", "store", 0)). 
+ Once() + }, + expected: Expected{ + res: nil, + err: errors.New("error", "store", 0), + }, + }, + { + description: "[device exists] [without session] succeeds to authenticate device", + req: requests.DeviceAuth{ + TenantID: "00000000-0000-4000-0000-000000000000", + Hostname: "hostname", + Identity: &requests.DeviceIdentity{MAC: ""}, + Info: nil, + PublicKey: "", + Sessions: []string{}, + RealIP: "127.0.0.1", + }, + requiredMocks: func(ctx context.Context) { + uid := toUID("00000000-0000-4000-0000-000000000000", "hostname", "", "") + device := &models.Device{UID: uid, Name: "hostname"} + + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", Name: "test"}, nil). + Once() + cacheMock. + On("Get", ctx, "auth_device/"+uid, testifymock.Anything). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, uid). + Return(device, nil). + Once() + + expectedDevice := *device + expectedDevice.LastSeen = now + expectedDevice.DisconnectedAt = nil + + storeMock. + On("DeviceUpdate", ctx, &expectedDevice). + Return(nil). + Once() + cacheMock. + On("Set", ctx, "auth_device/"+uid, map[string]string{"device_name": "hostname", "namespace_name": "test"}, time.Second*30). + Return(nil). 
+ Once() + }, + expected: Expected{ + res: &models.DeviceAuthResponse{ + UID: toUID("00000000-0000-4000-0000-000000000000", "hostname", "", ""), + Token: toToken("00000000-0000-4000-0000-000000000000", toUID("00000000-0000-4000-0000-000000000000", "hostname", "", "")), + Name: "hostname", + Namespace: "test", + }, + err: nil, + }, + }, + { + description: "[device exists] succeeds to authenticate device with closed sessions", + req: requests.DeviceAuth{ + TenantID: "00000000-0000-4000-0000-000000000000", + Hostname: "hostname", + Identity: &requests.DeviceIdentity{MAC: ""}, + PublicKey: "", + Sessions: []string{"session_1", "session_2"}, + RealIP: "127.0.0.1", + }, + requiredMocks: func(ctx context.Context) { + uid := toUID("00000000-0000-4000-0000-000000000000", "hostname", "", "") + device := &models.Device{UID: uid, Name: "hostname"} + expectedDevice := *device + expectedDevice.LastSeen = clock.Now() + + cacheMock. + On("Get", ctx, "auth_device/"+uid, testifymock.AnythingOfType("*map[string]string")). + Return(store.ErrNoDocuments). + Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", Name: "test"}, nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, uid). + Return(device, nil). + Once() + storeMock. + On("DeviceUpdate", ctx, &expectedDevice). + Return(nil). + Once() + storeMock. + On("SessionResolve", ctx, store.SessionUIDResolver, "session_1"). + Return(&models.Session{UID: "session_1", Closed: true}, nil). + Once() + storeMock. + On("SessionResolve", ctx, store.SessionUIDResolver, "session_2"). + Return(&models.Session{UID: "session_2", Closed: true}, nil). + Once() + cacheMock. + On("Set", ctx, "auth_device/"+uid, map[string]string{"device_name": "hostname", "namespace_name": "test"}, time.Second*30). + Return(nil). 
+ Once() + }, + expected: Expected{ + res: &models.DeviceAuthResponse{ + UID: toUID("00000000-0000-4000-0000-000000000000", "hostname", "", ""), + Token: toToken("00000000-0000-4000-0000-000000000000", toUID("00000000-0000-4000-0000-000000000000", "hostname", "", "")), + Name: "hostname", + Namespace: "test", + }, + err: nil, + }, + }, + { + description: "[device exists] succeeds to authenticate device with open sessions", + req: requests.DeviceAuth{ + TenantID: "00000000-0000-4000-0000-000000000000", + Hostname: "hostname", + Identity: &requests.DeviceIdentity{MAC: ""}, + PublicKey: "", + Sessions: []string{"session_1", "session_2"}, + RealIP: "127.0.0.1", + }, + requiredMocks: func(ctx context.Context) { + uid := toUID("00000000-0000-4000-0000-000000000000", "hostname", "", "") + device := &models.Device{UID: uid, Name: "hostname"} + expectedDevice := *device + expectedDevice.LastSeen = clock.Now() + + cacheMock. + On("Get", ctx, "auth_device/"+uid, testifymock.AnythingOfType("*map[string]string")). + Return(store.ErrNoDocuments). + Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", Name: "test"}, nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, uid). + Return(device, nil). + Once() + storeMock. + On("DeviceUpdate", ctx, &expectedDevice). + Return(nil). + Once() + storeMock. + On("SessionResolve", ctx, store.SessionUIDResolver, "session_1"). + Return(&models.Session{UID: "session_1", Closed: false}, nil). + Once() + storeMock. + On("SessionUpdate", ctx, testifymock.MatchedBy(func(s *models.Session) bool { return s.UID == "session_1" })). + Return(nil). + Once() + storeMock. + On("ActiveSessionResolve", ctx, store.SessionUIDResolver, "session_1"). + Return(&models.ActiveSession{UID: "session_1"}, nil). + Once() + storeMock. 
+ On("ActiveSessionUpdate", ctx, testifymock.MatchedBy(func(as *models.ActiveSession) bool { return as.UID == "session_1" })). + Return(nil). + Once() + storeMock. + On("SessionResolve", ctx, store.SessionUIDResolver, "session_2"). + Return(&models.Session{UID: "session_2", Closed: false}, nil). + Once() + storeMock. + On("SessionUpdate", ctx, testifymock.MatchedBy(func(s *models.Session) bool { return s.UID == "session_2" })). + Return(nil). + Once() + storeMock. + On("ActiveSessionResolve", ctx, store.SessionUIDResolver, "session_2"). + Return(&models.ActiveSession{UID: "session_2"}, nil). + Once() + storeMock. + On("ActiveSessionUpdate", ctx, testifymock.MatchedBy(func(as *models.ActiveSession) bool { return as.UID == "session_2" })). + Return(nil). + Once() + cacheMock. + On("Set", ctx, "auth_device/"+uid, map[string]string{"device_name": "hostname", "namespace_name": "test"}, time.Second*30). + Return(nil). + Once() + }, + expected: Expected{ + res: &models.DeviceAuthResponse{ + UID: toUID("00000000-0000-4000-0000-000000000000", "hostname", "", ""), + Token: toToken("00000000-0000-4000-0000-000000000000", toUID("00000000-0000-4000-0000-000000000000", "hostname", "", "")), + Name: "hostname", + Namespace: "test", + }, + err: nil, + }, + }, + { + description: "[device exists] succeeds to authenticate device with info", + req: requests.DeviceAuth{ + TenantID: "00000000-0000-4000-0000-000000000000", + Hostname: "hostname", + Identity: &requests.DeviceIdentity{MAC: ""}, + Info: &requests.DeviceInfo{ + ID: "test", + PrettyName: "Test", + Version: "v0.20.0", + Arch: "arch64", + Platform: "native", + }, + PublicKey: "", + Sessions: []string{}, + RealIP: "127.0.0.1", + }, + requiredMocks: func(ctx context.Context) { + uid := toUID("00000000-0000-4000-0000-000000000000", "hostname", "", "") + device := &models.Device{UID: uid, Name: "hostname"} + + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). 
+ Return(&models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", Name: "test"}, nil). + Once() + cacheMock. + On("Get", ctx, "auth_device/"+uid, testifymock.Anything). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, uid). + Return(device, nil). + Once() + + expectedDevice := *device + expectedDevice.LastSeen = now + expectedDevice.DisconnectedAt = nil + expectedDevice.Info = &models.DeviceInfo{ + ID: "test", + PrettyName: "Test", + Version: "v0.20.0", + Arch: "arch64", + Platform: "native", + } + + storeMock. + On("DeviceUpdate", ctx, &expectedDevice). + Return(nil). + Once() + cacheMock. + On("Set", ctx, "auth_device/"+uid, map[string]string{"device_name": "hostname", "namespace_name": "test"}, time.Second*30). + Return(nil). + Once() + }, + expected: Expected{ + res: &models.DeviceAuthResponse{ + UID: toUID("00000000-0000-4000-0000-000000000000", "hostname", "", ""), + Token: toToken("00000000-0000-4000-0000-000000000000", toUID("00000000-0000-4000-0000-000000000000", "hostname", "", "")), + Name: "hostname", + Namespace: "test", + }, + err: nil, + }, + }, + { + description: "[device exists] succeeds to authenticate a removed device", + req: requests.DeviceAuth{ + TenantID: "00000000-0000-4000-0000-000000000000", + Hostname: "hostname", + Identity: &requests.DeviceIdentity{MAC: ""}, + Info: nil, + PublicKey: "", + Sessions: []string{}, + RealIP: "127.0.0.1", + }, + requiredMocks: func(ctx context.Context) { + uid := toUID("00000000-0000-4000-0000-000000000000", "hostname", "", "") + device := &models.Device{UID: uid, Name: "hostname", RemovedAt: &now, Status: models.DeviceStatusRemoved} + + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", Name: "test"}, nil). + Once() + cacheMock. + On("Get", ctx, "auth_device/"+uid, testifymock.Anything). + Return(nil). 
+ Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, uid). + Return(device, nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "00000000-0000-4000-0000-000000000000", models.DeviceStatusPending, int64(1)). + Return(nil). + Once() + + expectedDevice := *device + expectedDevice.LastSeen = now + expectedDevice.DisconnectedAt = nil + expectedDevice.RemovedAt = &now + expectedDevice.Status = models.DeviceStatusPending + + storeMock. + On("DeviceUpdate", ctx, device). + Return(nil). + Once() + cacheMock. + On("Set", ctx, "auth_device/"+uid, map[string]string{"device_name": "hostname", "namespace_name": "test"}, time.Second*30). + Return(nil). + Once() + }, + expected: Expected{ + res: &models.DeviceAuthResponse{ + UID: toUID("00000000-0000-4000-0000-000000000000", "hostname", "", ""), + Token: toToken("00000000-0000-4000-0000-000000000000", toUID("00000000-0000-4000-0000-000000000000", "hostname", "", "")), + Name: "hostname", + Namespace: "test", + }, + err: nil, + }, + }, + { + description: "[device creation] fails when device creation fails", + req: requests.DeviceAuth{ + TenantID: "00000000-0000-4000-0000-000000000000", + Hostname: "new-device", + Identity: &requests.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + Info: &requests.DeviceInfo{ + ID: "device-id", + PrettyName: "My Device", + Version: "1.0.0", + Arch: "x86_64", + Platform: "linux", + }, + PublicKey: "public-key", + Sessions: []string{}, + RealIP: "127.0.0.1", + }, + requiredMocks: func(ctx context.Context) { + uid := toUID("00000000-0000-4000-0000-000000000000", "new-device", "aa:bb:cc:dd:ee:ff", "public-key") + + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", Name: "test"}, nil). + Once() + cacheMock. + On("Get", ctx, "auth_device/"+uid, testifymock.Anything). + Return(nil). + Once() + storeMock. 
+ On("DeviceResolve", ctx, store.DeviceUIDResolver, uid). + Return(nil, store.ErrNoDocuments). + Once() + storeMock. + On( + "DeviceCreate", + ctx, + &models.Device{ + CreatedAt: now, + UID: uid, + TenantID: "00000000-0000-4000-0000-000000000000", + LastSeen: now, + DisconnectedAt: nil, + Status: models.DeviceStatusPending, + StatusUpdatedAt: now, + Name: "new-device", + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + PublicKey: "public-key", + RemoteAddr: "127.0.0.1", + Taggable: models.Taggable{TagIDs: []string{}}, + Position: &models.DevicePosition{Longitude: 0., Latitude: 0.}, + Info: &models.DeviceInfo{ + ID: "device-id", + PrettyName: "My Device", + Version: "1.0.0", + Arch: "x86_64", + Platform: "linux", + }, + }, + ). + Return("", errors.New("database error", "store", 0)). + Once() + }, + expected: Expected{ + res: nil, + err: NewErrDeviceCreate(models.Device{}, errors.New("database error", "store", 0)), + }, + }, + { + description: "[device creation] fails when namespace increment fails", + req: requests.DeviceAuth{ + TenantID: "00000000-0000-4000-0000-000000000000", + Hostname: "new-device", + Identity: &requests.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + PublicKey: "public-key", + Info: &requests.DeviceInfo{ + ID: "device-id", + PrettyName: "My Device", + Version: "1.0.0", + Arch: "x86_64", + Platform: "linux", + }, + Sessions: []string{}, + RealIP: "127.0.0.1", + }, + requiredMocks: func(ctx context.Context) { + uid := toUID("00000000-0000-4000-0000-000000000000", "new-device", "aa:bb:cc:dd:ee:ff", "public-key") + + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", Name: "test"}, nil). + Once() + cacheMock. + On("Get", ctx, "auth_device/"+uid, testifymock.Anything). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, uid). + Return(nil, store.ErrNoDocuments). 
+ Once() + storeMock. + On( + "DeviceCreate", + ctx, + &models.Device{ + CreatedAt: now, + UID: uid, + TenantID: "00000000-0000-4000-0000-000000000000", + LastSeen: now, + DisconnectedAt: nil, + Status: models.DeviceStatusPending, + StatusUpdatedAt: now, + Name: "new-device", + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + PublicKey: "public-key", + RemoteAddr: "127.0.0.1", + Taggable: models.Taggable{TagIDs: []string{}}, + Position: &models.DevicePosition{Longitude: 0., Latitude: 0.}, + Info: &models.DeviceInfo{ + ID: "device-id", + PrettyName: "My Device", + Version: "1.0.0", + Arch: "x86_64", + Platform: "linux", + }, + }, + ). + Return(uid, nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "00000000-0000-4000-0000-000000000000", models.DeviceStatusPending, int64(1)). + Return(errors.New("increment error", "store", 0)). + Once() + }, + expected: Expected{ + res: nil, + err: errors.New("increment error", "store", 0), + }, + }, + { + description: "[device creation] succeeds to create and authenticate new device", + req: requests.DeviceAuth{ + TenantID: "00000000-0000-4000-0000-000000000000", + Hostname: "new-device", + Identity: &requests.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + Info: &requests.DeviceInfo{ + ID: "device-id", + PrettyName: "My Device", + Version: "1.0.0", + Arch: "x86_64", + Platform: "linux", + }, + PublicKey: "public-key", + Sessions: []string{"session_1", "session_2"}, + RealIP: "127.0.0.1", + }, + requiredMocks: func(ctx context.Context) { + uid := toUID("00000000-0000-4000-0000-000000000000", "new-device", "aa:bb:cc:dd:ee:ff", "public-key") + + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", Name: "test"}, nil). + Once() + cacheMock. + On("Get", ctx, "auth_device/"+uid, testifymock.Anything). + Return(nil). + Once() + storeMock. 
+ On("DeviceResolve", ctx, store.DeviceUIDResolver, uid). + Return(nil, store.ErrNoDocuments). + Once() + storeMock. + On( + "DeviceCreate", + ctx, + &models.Device{ + CreatedAt: now, + UID: uid, + TenantID: "00000000-0000-4000-0000-000000000000", + LastSeen: now, + DisconnectedAt: nil, + Status: models.DeviceStatusPending, + StatusUpdatedAt: now, + Name: "new-device", + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + PublicKey: "public-key", + RemoteAddr: "127.0.0.1", + Taggable: models.Taggable{TagIDs: []string{}}, + Position: &models.DevicePosition{Longitude: 0., Latitude: 0.}, + Info: &models.DeviceInfo{ + ID: "device-id", + PrettyName: "My Device", + Version: "1.0.0", + Arch: "x86_64", + Platform: "linux", + }, + }, + ). + Return(uid, nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "00000000-0000-4000-0000-000000000000", models.DeviceStatusPending, int64(1)). + Return(nil). + Once() + storeMock. + On("SessionResolve", ctx, store.SessionUIDResolver, "session_1"). + Return(&models.Session{UID: "session_1", Closed: false}, nil). + Once() + storeMock. + On("ActiveSessionResolve", ctx, store.SessionUIDResolver, "session_1"). + Return(&models.ActiveSession{UID: "session_1"}, nil). + Once() + storeMock. + On("ActiveSessionUpdate", ctx, testifymock.MatchedBy(func(as *models.ActiveSession) bool { return as.UID == "session_1" })). + Return(nil). + Once() + storeMock. + On("SessionUpdate", ctx, testifymock.MatchedBy(func(s *models.Session) bool { return s.UID == "session_1" })). + Return(nil). + Once() + storeMock. + On("SessionResolve", ctx, store.SessionUIDResolver, "session_2"). + Return(&models.Session{UID: "session_2", Closed: false}, nil). + Once() + storeMock. + On("ActiveSessionResolve", ctx, store.SessionUIDResolver, "session_2"). + Return(&models.ActiveSession{UID: "session_2"}, nil). + Once() + storeMock. 
+ On("ActiveSessionUpdate", ctx, testifymock.MatchedBy(func(as *models.ActiveSession) bool { return as.UID == "session_2" })). + Return(nil). + Once() + storeMock. + On("SessionUpdate", ctx, testifymock.MatchedBy(func(s *models.Session) bool { return s.UID == "session_2" })). + Return(nil). + Once() + cacheMock. + On("Set", ctx, "auth_device/"+uid, map[string]string{"device_name": "new-device", "namespace_name": "test"}, time.Second*30). + Return(nil). + Once() + }, + expected: Expected{ + res: &models.DeviceAuthResponse{ + UID: toUID("00000000-0000-4000-0000-000000000000", "new-device", "aa:bb:cc:dd:ee:ff", "public-key"), + Token: toToken("00000000-0000-4000-0000-000000000000", toUID("00000000-0000-4000-0000-000000000000", "new-device", "aa:bb:cc:dd:ee:ff", "public-key")), + Name: "new-device", + Namespace: "test", + }, + err: nil, + }, + }, + { + description: "[device creation] succeeds to create and authenticate new device with sessions", + req: requests.DeviceAuth{ + TenantID: "00000000-0000-4000-0000-000000000000", + Hostname: "new-device", + Identity: &requests.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + Info: &requests.DeviceInfo{ + ID: "device-id", + PrettyName: "My Device", + Version: "1.0.0", + Arch: "x86_64", + Platform: "linux", + }, + PublicKey: "public-key", + Sessions: []string{}, + RealIP: "127.0.0.1", + }, + requiredMocks: func(ctx context.Context) { + uid := toUID("00000000-0000-4000-0000-000000000000", "new-device", "aa:bb:cc:dd:ee:ff", "public-key") + + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", Name: "test"}, nil). + Once() + cacheMock. + On("Get", ctx, "auth_device/"+uid, testifymock.Anything). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, uid). + Return(nil, store.ErrNoDocuments). + Once() + storeMock. 
+ On( + "DeviceCreate", + ctx, + &models.Device{ + CreatedAt: now, + UID: uid, + TenantID: "00000000-0000-4000-0000-000000000000", + LastSeen: now, + DisconnectedAt: nil, + Status: models.DeviceStatusPending, + StatusUpdatedAt: now, + Name: "new-device", + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + PublicKey: "public-key", + RemoteAddr: "127.0.0.1", + Taggable: models.Taggable{TagIDs: []string{}}, + Position: &models.DevicePosition{Longitude: 0., Latitude: 0.}, + Info: &models.DeviceInfo{ + ID: "device-id", + PrettyName: "My Device", + Version: "1.0.0", + Arch: "x86_64", + Platform: "linux", + }, + }, + ). + Return(uid, nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "00000000-0000-4000-0000-000000000000", models.DeviceStatusPending, int64(1)). + Return(nil). + Once() + cacheMock. + On("Set", ctx, "auth_device/"+uid, map[string]string{"device_name": "new-device", "namespace_name": "test"}, time.Second*30). + Return(nil). + Once() + }, + expected: Expected{ + res: &models.DeviceAuthResponse{ + UID: toUID("00000000-0000-4000-0000-000000000000", "new-device", "aa:bb:cc:dd:ee:ff", "public-key"), + Token: toToken("00000000-0000-4000-0000-000000000000", toUID("00000000-0000-4000-0000-000000000000", "new-device", "aa:bb:cc:dd:ee:ff", "public-key")), + Name: "new-device", + Namespace: "test", + }, + err: nil, + }, + }, + { + description: "[device creation] succeeds when hostname is derived from MAC", + req: requests.DeviceAuth{ + TenantID: "00000000-0000-4000-0000-000000000000", + Hostname: "", + Identity: &requests.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + PublicKey: "public-key", + Info: &requests.DeviceInfo{ + ID: "device-id", + PrettyName: "My Device", + Version: "1.0.0", + Arch: "x86_64", + Platform: "linux", + }, + Sessions: []string{}, + RealIP: "127.0.0.1", + }, + requiredMocks: func(ctx context.Context) { + uid := toUID("00000000-0000-4000-0000-000000000000", "aa-bb-cc-dd-ee-ff", "aa:bb:cc:dd:ee:ff", "public-key") + + 
storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", Name: "test"}, nil). + Once() + cacheMock. + On("Get", ctx, "auth_device/"+uid, testifymock.Anything). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, uid). + Return(nil, store.ErrNoDocuments). + Once() + storeMock. + On( + "DeviceCreate", + ctx, + &models.Device{ + CreatedAt: now, + UID: uid, + TenantID: "00000000-0000-4000-0000-000000000000", + LastSeen: now, + DisconnectedAt: nil, + Status: models.DeviceStatusPending, + StatusUpdatedAt: now, + Name: "aa-bb-cc-dd-ee-ff", + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + PublicKey: "public-key", + RemoteAddr: "127.0.0.1", + Taggable: models.Taggable{TagIDs: []string{}}, + Position: &models.DevicePosition{Longitude: 0., Latitude: 0.}, + Info: &models.DeviceInfo{ + ID: "device-id", + PrettyName: "My Device", + Version: "1.0.0", + Arch: "x86_64", + Platform: "linux", + }, + }, + ). + Return(uid, nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "00000000-0000-4000-0000-000000000000", models.DeviceStatusPending, int64(1)). + Return(nil). + Once() + cacheMock. + On("Set", ctx, "auth_device/"+uid, map[string]string{"device_name": "aa-bb-cc-dd-ee-ff", "namespace_name": "test"}, time.Second*30). + Return(nil). 
+ Once() + }, + expected: Expected{ + res: &models.DeviceAuthResponse{ + UID: toUID("00000000-0000-4000-0000-000000000000", "aa-bb-cc-dd-ee-ff", "aa:bb:cc:dd:ee:ff", "public-key"), + Token: toToken("00000000-0000-4000-0000-000000000000", toUID("00000000-0000-4000-0000-000000000000", "aa-bb-cc-dd-ee-ff", "aa:bb:cc:dd:ee:ff", "public-key")), + Name: "aa-bb-cc-dd-ee-ff", + Namespace: "test", + }, + err: nil, + }, + }, + { + description: "[device creation] succeeds to create and authenticate new device with null information", + req: requests.DeviceAuth{ + TenantID: "00000000-0000-4000-0000-000000000000", + Hostname: "new-device", + Identity: &requests.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + Info: nil, + PublicKey: "public-key", + Sessions: []string{"session_1", "session_2"}, + RealIP: "127.0.0.1", + }, + requiredMocks: func(ctx context.Context) { + uid := toUID("00000000-0000-4000-0000-000000000000", "new-device", "aa:bb:cc:dd:ee:ff", "public-key") + + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", Name: "test"}, nil). + Once() + cacheMock. + On("Get", ctx, "auth_device/"+uid, testifymock.Anything). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, uid). + Return(nil, store.ErrNoDocuments). + Once() + storeMock. + On( + "DeviceCreate", + ctx, + &models.Device{ + CreatedAt: now, + UID: uid, + TenantID: "00000000-0000-4000-0000-000000000000", + LastSeen: now, + DisconnectedAt: nil, + Status: models.DeviceStatusPending, + StatusUpdatedAt: now, + Name: "new-device", + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + PublicKey: "public-key", + RemoteAddr: "127.0.0.1", + Taggable: models.Taggable{TagIDs: []string{}}, + Position: &models.DevicePosition{Longitude: 0., Latitude: 0.}, + Info: nil, + }, + ). + Return(uid, nil). + Once() + storeMock. 
+ On("NamespaceIncrementDeviceCount", ctx, "00000000-0000-4000-0000-000000000000", models.DeviceStatusPending, int64(1)). + Return(nil). + Once() + storeMock. + On("SessionResolve", ctx, store.SessionUIDResolver, "session_1"). + Return(&models.Session{UID: "session_1", Closed: false}, nil). + Once() + storeMock. + On("ActiveSessionResolve", ctx, store.SessionUIDResolver, "session_1"). + Return(&models.ActiveSession{UID: "session_1"}, nil). + Once() + storeMock. + On("ActiveSessionUpdate", ctx, testifymock.MatchedBy(func(as *models.ActiveSession) bool { return as.UID == "session_1" })). + Return(nil). + Once() + storeMock. + On("SessionUpdate", ctx, testifymock.MatchedBy(func(s *models.Session) bool { return s.UID == "session_1" })). + Return(nil). + Once() + storeMock. + On("SessionResolve", ctx, store.SessionUIDResolver, "session_2"). + Return(&models.Session{UID: "session_2", Closed: false}, nil). + Once() + storeMock. + On("ActiveSessionResolve", ctx, store.SessionUIDResolver, "session_2"). + Return(&models.ActiveSession{UID: "session_2"}, nil). + Once() + storeMock. + On("ActiveSessionUpdate", ctx, testifymock.MatchedBy(func(as *models.ActiveSession) bool { return as.UID == "session_2" })). + Return(nil). + Once() + storeMock. + On("SessionUpdate", ctx, testifymock.MatchedBy(func(s *models.Session) bool { return s.UID == "session_2" })). + Return(nil). + Once() + cacheMock. + On("Set", ctx, "auth_device/"+uid, map[string]string{"device_name": "new-device", "namespace_name": "test"}, time.Second*30). + Return(nil). 
+ Once() + }, + expected: Expected{ + res: &models.DeviceAuthResponse{ + UID: toUID("00000000-0000-4000-0000-000000000000", "new-device", "aa:bb:cc:dd:ee:ff", "public-key"), + Token: toToken("00000000-0000-4000-0000-000000000000", toUID("00000000-0000-4000-0000-000000000000", "new-device", "aa:bb:cc:dd:ee:ff", "public-key")), + Name: "new-device", + Namespace: "test", + }, + err: nil, + }, + }, + } + + service := NewService(store.Store(storeMock), privateKey, &privateKey.PublicKey, cacheMock, clientMock) + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + ctx := context.TODO() + tc.requiredMocks(ctx) + + authRes, err := service.AuthDevice(ctx, tc.req) + require.Equal(tt, tc.expected.res, authRes) + require.Equal(tt, tc.expected.err, err) + }) + } + + storeMock.AssertExpectations(t) } -func TestAuthUser(t *testing.T) { +func TestService_AuthLocalUser(t *testing.T) { mock := new(mocks.Store) + cacheMock := new(mockcache.Cache) ctx := context.TODO() type Expected struct { - userAuthResponse *models.UserAuthResponse - err error + res *models.UserAuthResponse + lockout int64 + mfaToken string + err error } tests := []struct { description string - req requests.UserAuth + req *requests.AuthLocalUser + sourceIP string requiredMocks func() expected Expected - expectedErr error }{ { - description: "Fails when username is not found", - req: requests.UserAuth{ - Username: "user", - Password: "passwd", + description: "fails when could not retrieve the system", + sourceIP: "127.0.0.1", + req: &requests.AuthLocalUser{ + Identifier: "john_doe", + Password: "secret", }, - expectedErr: errors.New("error", "", 0), requiredMocks: func() { - mock.On("UserGetByUsername", ctx, "user").Return(nil, errors.New("error", "", 0)).Once() + mock. + On("SystemGet", ctx). + Return(nil, store.ErrNoDocuments). 
+ Once() + }, + expected: Expected{ + res: nil, + lockout: 0, + mfaToken: "", + err: NewErrAuthMethodNotAllowed(models.UserAuthMethodLocal.String()), + }, + }, + { + description: "fails when local authentication is not allowed", + sourceIP: "127.0.0.1", + req: &requests.AuthLocalUser{ + Identifier: "john_doe", + Password: "secret", + }, + requiredMocks: func() { + mock. + On("SystemGet", ctx). + Return( + &models.System{ + Authentication: &models.SystemAuthentication{ + Local: &models.SystemAuthenticationLocal{ + Enabled: false, + }, + }, + }, + nil, + ). + Once() + }, + expected: Expected{ + res: nil, + lockout: 0, + mfaToken: "", + err: NewErrAuthMethodNotAllowed(models.UserAuthMethodLocal.String()), + }, + }, + { + description: "fails when username is not found", + sourceIP: "127.0.0.1", + req: &requests.AuthLocalUser{ + Identifier: "john_doe", + Password: "secret", + }, + requiredMocks: func() { + mock. + On("SystemGet", ctx). + Return( + &models.System{ + Authentication: &models.SystemAuthentication{ + Local: &models.SystemAuthenticationLocal{ + Enabled: true, + }, + }, + }, + nil, + ). + Once() + mock. + On("UserResolve", ctx, store.UserUsernameResolver, "john_doe"). + Return(nil, store.ErrNoDocuments). + Once() + }, + expected: Expected{ + res: nil, + lockout: 0, + mfaToken: "", + err: NewErrAuthUnathorized(nil), }, - expected: Expected{nil, NewErrAuthUnathorized(nil)}, }, { - description: "Fails when email is not found", - req: requests.UserAuth{ - Username: "user@test.com", - Password: "passwd", + description: "fails when email is not found", + sourceIP: "127.0.0.1", + req: &requests.AuthLocalUser{ + Identifier: "john.doe@test.com", + Password: "secret", }, - expectedErr: errors.New("error", "", 0), requiredMocks: func() { - mock.On("UserGetByEmail", ctx, "user@test.com").Return(nil, errors.New("error", "", 0)).Once() + mock. + On("SystemGet", ctx). 
+ Return( + &models.System{ + Authentication: &models.SystemAuthentication{ + Local: &models.SystemAuthenticationLocal{ + Enabled: true, + }, + }, + }, + nil, + ). + Once() + mock. + On("UserResolve", ctx, store.UserEmailResolver, "john.doe@test.com"). + Return(nil, store.ErrNoDocuments). + Once() + }, + expected: Expected{ + res: nil, + lockout: 0, + mfaToken: "", + err: NewErrAuthUnathorized(nil), }, - expected: Expected{nil, NewErrAuthUnathorized(nil)}, }, { - description: "Fails when user has account but wrong password", - req: requests.UserAuth{ - Username: "user", - Password: "passwd", + description: "fails when user does not have local as authentication method", + sourceIP: "127.0.0.1", + req: &requests.AuthLocalUser{ + Identifier: "john_doe", + Password: "secret", }, requiredMocks: func() { + mock. + On("SystemGet", ctx). + Return( + &models.System{ + Authentication: &models.SystemAuthentication{ + Local: &models.SystemAuthenticationLocal{ + Enabled: true, + }, + }, + }, + nil, + ). + Once() user := &models.User{ + ID: "65fdd16b5f62f93184ec8a39", + Origin: models.UserOriginLocal, + Status: models.UserStatusNotConfirmed, + LastLogin: now, + MFA: models.UserMFA{ + Enabled: false, + }, UserData: models.UserData{ - Username: "user", + Username: "john_doe", + Email: "john.doe@test.com", + }, + Password: models.UserPassword{ + Hash: "2bb80d537b1da3e38bd30361aa855686bde0eacd7162fef6a25fe97bf527a25b", }, + Preferences: models.UserPreferences{ + PreferredNamespace: "", + AuthMethods: []models.UserAuthMethod{}, + }, + } - UserPassword: models.NewUserPassword("wrongPassword"), - ID: "id", - Confirmed: true, - LastLogin: now, + mock. + On("UserResolve", ctx, store.UserUsernameResolver, "john_doe"). + Return(user, nil). 
+ Once() + }, + expected: Expected{ + res: nil, + lockout: 0, + mfaToken: "", + err: NewErrAuthUnathorized(nil), + }, + }, + { + description: "fails when user is not confirmed", + sourceIP: "127.0.0.1", + req: &requests.AuthLocalUser{ + Identifier: "john_doe", + Password: "secret", + }, + requiredMocks: func() { + mock. + On("SystemGet", ctx). + Return( + &models.System{ + Authentication: &models.SystemAuthentication{ + Local: &models.SystemAuthenticationLocal{ + Enabled: true, + }, + }, + }, + nil, + ). + Once() + user := &models.User{ + ID: "65fdd16b5f62f93184ec8a39", + Origin: models.UserOriginLocal, + Status: models.UserStatusNotConfirmed, + LastLogin: now, + MFA: models.UserMFA{ + Enabled: false, + }, + UserData: models.UserData{ + Username: "john_doe", + Email: "john.doe@test.com", + }, + Password: models.UserPassword{ + Hash: "2bb80d537b1da3e38bd30361aa855686bde0eacd7162fef6a25fe97bf527a25b", + }, + Preferences: models.UserPreferences{ + PreferredNamespace: "", + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + }, } - mock.On("UserGetByUsername", ctx, "user").Return(user, nil).Once() + mock. + On("UserResolve", ctx, store.UserUsernameResolver, "john_doe"). + Return(user, nil). + Once() + }, + expected: Expected{ + res: nil, + lockout: 0, + mfaToken: "", + err: NewErrUserNotConfirmed(nil), + }, + }, + { + description: "fails when an account lockout occurs", + sourceIP: "127.0.0.1", + req: &requests.AuthLocalUser{ + Identifier: "john_doe", + Password: "wrong_password", + }, + requiredMocks: func() { + mock. + On("SystemGet", ctx). + Return( + &models.System{ + Authentication: &models.SystemAuthentication{ + Local: &models.SystemAuthenticationLocal{ + Enabled: true, + }, + }, + }, + nil, + ). 
+ Once() + user := &models.User{ + ID: "65fdd16b5f62f93184ec8a39", + Origin: models.UserOriginLocal, + Status: models.UserStatusConfirmed, + LastLogin: now, + MFA: models.UserMFA{ + Enabled: false, + }, + UserData: models.UserData{ + Username: "john_doe", + Email: "john.doe@test.com", + }, + Password: models.UserPassword{ + Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi", + }, + Preferences: models.UserPreferences{ + PreferredNamespace: "", + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + }, + } - namespace := &models.Namespace{ - Name: "group1", - Owner: "hash1", - TenantID: "tenant", + mock. + On("UserResolve", ctx, store.UserUsernameResolver, "john_doe"). + Return(user, nil). + Once() + cacheMock. + On("HasAccountLockout", ctx, "127.0.0.1", "65fdd16b5f62f93184ec8a39"). + Return(int64(1711510689), 3, nil). + Once() + }, + expected: Expected{ + res: nil, + lockout: 1711510689, + mfaToken: "", + err: NewErrAuthUnathorized(nil), + }, + }, + { + description: "fails when input password is wrong", + sourceIP: "127.0.0.1", + req: &requests.AuthLocalUser{ + Identifier: "john_doe", + Password: "wrong_password", + }, + requiredMocks: func() { + mock. + On("SystemGet", ctx). + Return( + &models.System{ + Authentication: &models.SystemAuthentication{ + Local: &models.SystemAuthenticationLocal{ + Enabled: true, + }, + }, + }, + nil, + ). 
+ Once() + user := &models.User{ + ID: "65fdd16b5f62f93184ec8a39", + Origin: models.UserOriginLocal, + Status: models.UserStatusConfirmed, + LastLogin: now, + MFA: models.UserMFA{ + Enabled: false, + }, + UserData: models.UserData{ + Username: "john_doe", + Email: "john.doe@test.com", + }, + Password: models.UserPassword{ + Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi", + }, + Preferences: models.UserPreferences{ + PreferredNamespace: "", + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + }, } - mock.On("NamespaceGetFirst", ctx, user.ID).Return(namespace, nil).Once() + mock. + On("UserResolve", ctx, store.UserUsernameResolver, "john_doe"). + Return(user, nil). + Once() + cacheMock. + On("HasAccountLockout", ctx, "127.0.0.1", "65fdd16b5f62f93184ec8a39"). + Return(int64(0), 0, nil). + Once() + hashMock. + On("CompareWith", "wrong_password", "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi"). + Return(false). + Once() + cacheMock. + On("StoreLoginAttempt", ctx, "127.0.0.1", "65fdd16b5f62f93184ec8a39"). + Return(int64(1711510689), 3, nil). + Once() + }, + expected: Expected{ + res: nil, + lockout: 1711510689, + mfaToken: "", + err: NewErrAuthUnathorized(nil), }, - expected: Expected{nil, NewErrAuthUnathorized(nil)}, }, - } + { + description: "fails when user has MFA enable", + sourceIP: "127.0.0.1", + req: &requests.AuthLocalUser{ + Identifier: "john_doe", + Password: "secret", + }, + requiredMocks: func() { + mock. + On("SystemGet", ctx). + Return( + &models.System{ + Authentication: &models.SystemAuthentication{ + Local: &models.SystemAuthenticationLocal{ + Enabled: true, + }, + }, + }, + nil, + ). 
+ Once() + user := &models.User{ + ID: "65fdd16b5f62f93184ec8a39", + Origin: models.UserOriginLocal, + Status: models.UserStatusConfirmed, + LastLogin: now, + MFA: models.UserMFA{ + Enabled: true, + }, + UserData: models.UserData{ + Username: "john_doe", + Email: "john.doe@test.com", + }, + Password: models.UserPassword{ + Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi", + }, + Preferences: models.UserPreferences{ + PreferredNamespace: "", + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + }, + } - for _, tc := range tests { - t.Run(tc.description, func(t *testing.T) { - tc.requiredMocks() + mock. + On("UserResolve", ctx, store.UserUsernameResolver, "john_doe"). + Return(user, nil). + Once() + cacheMock. + On("HasAccountLockout", ctx, "127.0.0.1", "65fdd16b5f62f93184ec8a39"). + Return(int64(0), 0, nil). + Once() + hashMock. + On("CompareWith", "secret", "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi"). + Return(true). + Once() + cacheMock. + On("ResetLoginAttempts", ctx, "127.0.0.1", "65fdd16b5f62f93184ec8a39"). + Return(nil). + Once() + uuidMock := &uuidmock.Uuid{} + uuid.DefaultBackend = uuidMock + uuidMock. + On("Generate"). + Return("00000000-0000-4000-0000-000000000000") + cacheMock. + On("Set", ctx, "mfa-token={00000000-0000-4000-0000-000000000000}", "65fdd16b5f62f93184ec8a39", 30*time.Minute). + Return(nil). 
+ Once() + }, + expected: Expected{ + res: nil, + lockout: 0, + mfaToken: "00000000-0000-4000-0000-000000000000", + err: nil, + }, + }, + { + description: "fails when can not update the last_login field", + sourceIP: "127.0.0.1", + req: &requests.AuthLocalUser{ + Identifier: "john_doe", + Password: "secret", + }, + requiredMocks: func() { + user := &models.User{ + ID: "65fdd16b5f62f93184ec8a39", + Origin: models.UserOriginLocal, + Status: models.UserStatusConfirmed, + LastLogin: now, + MFA: models.UserMFA{ + Enabled: false, + }, + UserData: models.UserData{ + Username: "john_doe", + Email: "john.doe@test.com", + }, + Password: models.UserPassword{ + Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi", + }, + Preferences: models.UserPreferences{ + PreferredNamespace: "", + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + }, + } + updatedUser := &models.User{ + ID: "65fdd16b5f62f93184ec8a39", + Origin: models.UserOriginLocal, + Status: models.UserStatusConfirmed, + LastLogin: now, + MFA: models.UserMFA{ + Enabled: false, + }, + UserData: models.UserData{ + Username: "john_doe", + Email: "john.doe@test.com", + }, + Password: models.UserPassword{ + Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi", + }, + Preferences: models.UserPreferences{ + PreferredNamespace: "", + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + }, + } - privateKey, err := rsa.GenerateKey(rand.Reader, 2048) - assert.NoError(t, err) + mock. + On("SystemGet", ctx). + Return( + &models.System{ + Authentication: &models.SystemAuthentication{ + Local: &models.SystemAuthenticationLocal{ + Enabled: true, + }, + }, + }, + nil, + ). + Once() + mock. + On("UserResolve", ctx, store.UserUsernameResolver, "john_doe"). + Return(user, nil). + Once() + cacheMock. + On("HasAccountLockout", ctx, "127.0.0.1", "65fdd16b5f62f93184ec8a39"). + Return(int64(0), 0, nil). + Once() + hashMock. 
+ On("CompareWith", "secret", "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi"). + Return(true). + Once() + cacheMock. + On("ResetLoginAttempts", ctx, "127.0.0.1", "65fdd16b5f62f93184ec8a39"). + Return(nil). + Once() + mock. + On("NamespaceGetPreferred", ctx, "65fdd16b5f62f93184ec8a39"). + Return(nil, errors.New("error", "layer", 0)). + Once() - service := NewService(store.Store(mock), privateKey, &privateKey.PublicKey, storecache.NewNullCache(), clientMock, nil) - authRes, err := service.AuthUser(ctx, &models.UserAuthRequest{ - Identifier: models.UserAuthIdentifier(tc.req.Username), - Password: tc.req.Password, - }) - assert.Equal(t, tc.expected, Expected{authRes, err}) - }) - } + clockMock := new(clockmock.Clock) + clock.DefaultBackend = clockMock + clockMock.On("Now").Return(now) - mock.AssertExpectations(t) -} + cacheMock. + On("Set", ctx, "token_65fdd16b5f62f93184ec8a39", testifymock.Anything, time.Hour*72). + Return(nil). + Once() -func TestAuthUserInfo(t *testing.T) { - mock := new(mocks.Store) + mock. + On("UserUpdate", ctx, updatedUser). + Return(errors.New("error", "", 0)). 
+ Once() + }, + expected: Expected{ + res: nil, + lockout: 0, + mfaToken: "", + err: NewErrUserUpdate( + &models.User{ + ID: "65fdd16b5f62f93184ec8a39", + Origin: models.UserOriginLocal, + Status: models.UserStatusConfirmed, + LastLogin: now, + UserData: models.UserData{ + Username: "john_doe", + Email: "john.doe@test.com", + }, + Password: models.UserPassword{ + Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi", + }, + Preferences: models.UserPreferences{ + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + }, + }, + errors.New("error", "", 0), + ), + }, + }, + { + description: "succeeds to authenticate without a namespace", + sourceIP: "127.0.0.1", + req: &requests.AuthLocalUser{ + Identifier: "john_doe", + Password: "secret", + }, + requiredMocks: func() { + user := &models.User{ + ID: "65fdd16b5f62f93184ec8a39", + Origin: models.UserOriginLocal, + Status: models.UserStatusConfirmed, + LastLogin: now, + MFA: models.UserMFA{ + Enabled: false, + }, + UserData: models.UserData{ + Username: "john_doe", + Email: "john.doe@test.com", + Name: "john doe", + }, + Password: models.UserPassword{ + Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi", + }, + Preferences: models.UserPreferences{ + PreferredNamespace: "", + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + }, + } + updatedUser := &models.User{ + ID: "65fdd16b5f62f93184ec8a39", + Origin: models.UserOriginLocal, + Status: models.UserStatusConfirmed, + LastLogin: now, + MFA: models.UserMFA{ + Enabled: false, + }, + UserData: models.UserData{ + Username: "john_doe", + Email: "john.doe@test.com", + Name: "john doe", + }, + Password: models.UserPassword{ + Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi", + }, + Preferences: models.UserPreferences{ + PreferredNamespace: "", + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + }, + } - ctx := context.TODO() + mock. + On("SystemGet", ctx). 
+ Return( + &models.System{ + Authentication: &models.SystemAuthentication{ + Local: &models.SystemAuthenticationLocal{ + Enabled: true, + }, + }, + }, + nil, + ). + Once() + mock. + On("UserResolve", ctx, store.UserUsernameResolver, "john_doe"). + Return(user, nil). + Once() + cacheMock. + On("HasAccountLockout", ctx, "127.0.0.1", "65fdd16b5f62f93184ec8a39"). + Return(int64(0), 0, nil). + Once() + hashMock. + On("CompareWith", "secret", "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi"). + Return(true). + Once() + cacheMock. + On("ResetLoginAttempts", ctx, "127.0.0.1", "65fdd16b5f62f93184ec8a39"). + Return(nil). + Once() + mock. + On("NamespaceGetPreferred", ctx, "65fdd16b5f62f93184ec8a39"). + Return(nil, errors.New("error", "layer", 0)). + Once() - type Expected struct { - userAuthResponse *models.UserAuthResponse - err error - } + clockMock := new(clockmock.Clock) + clock.DefaultBackend = clockMock + clockMock.On("Now").Return(now) - tests := []struct { - description string - username string - tenantID string - requiredMocks func() - expected Expected - expectedErr error - }{ + cacheMock. + On("Set", ctx, "token_65fdd16b5f62f93184ec8a39", testifymock.Anything, time.Hour*72). + Return(nil). + Once() + + mock. + On("UserUpdate", ctx, updatedUser). + Return(nil). 
+ Once() + }, + expected: Expected{ + res: &models.UserAuthResponse{ + ID: "65fdd16b5f62f93184ec8a39", + Origin: models.UserOriginLocal.String(), + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + Name: "john doe", + User: "john_doe", + Email: "john.doe@test.com", + Tenant: "", + Token: "must ignore", + }, + lockout: 0, + mfaToken: "", + err: nil, + }, + }, { - description: "Fails to find the user", - username: "notuser", - expectedErr: errors.New("error", "", 0), + description: "succeeds to authenticate with a namespace", + sourceIP: "127.0.0.1", + req: &requests.AuthLocalUser{ + Identifier: "john_doe", + Password: "secret", + }, requiredMocks: func() { - mock.On("UserGetByUsername", ctx, "notuser").Return(nil, errors.New("error", "", 0)).Once() + user := &models.User{ + ID: "65fdd16b5f62f93184ec8a39", + Origin: models.UserOriginLocal, + Status: models.UserStatusConfirmed, + LastLogin: now, + MFA: models.UserMFA{ + Enabled: false, + }, + UserData: models.UserData{ + Username: "john_doe", + Email: "john.doe@test.com", + Name: "john doe", + }, + Password: models.UserPassword{ + Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi", + }, + Preferences: models.UserPreferences{ + PreferredNamespace: "00000000-0000-4000-0000-000000000000", + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + }, + } + updatedUser := &models.User{ + ID: "65fdd16b5f62f93184ec8a39", + Origin: models.UserOriginLocal, + Status: models.UserStatusConfirmed, + LastLogin: now, + MFA: models.UserMFA{ + Enabled: false, + }, + UserData: models.UserData{ + Username: "john_doe", + Email: "john.doe@test.com", + Name: "john doe", + }, + Password: models.UserPassword{ + Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi", + }, + Preferences: models.UserPreferences{ + PreferredNamespace: "00000000-0000-4000-0000-000000000000", + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + }, + } + + mock. + On("SystemGet", ctx). 
+ Return( + &models.System{ + Authentication: &models.SystemAuthentication{ + Local: &models.SystemAuthenticationLocal{ + Enabled: true, + }, + }, + }, + nil, + ). + Once() + mock. + On("UserResolve", ctx, store.UserUsernameResolver, "john_doe"). + Return(user, nil). + Once() + cacheMock. + On("HasAccountLockout", ctx, "127.0.0.1", "65fdd16b5f62f93184ec8a39"). + Return(int64(0), 0, nil). + Once() + hashMock. + On("CompareWith", "secret", "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi"). + Return(true). + Once() + cacheMock. + On("ResetLoginAttempts", ctx, "127.0.0.1", "65fdd16b5f62f93184ec8a39"). + Return(nil). + Once() + + ns := &models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Members: []models.Member{ + { + ID: "65fdd16b5f62f93184ec8a39", + Role: "owner", + }, + }, + } + + mock. + On("NamespaceGetPreferred", ctx, "65fdd16b5f62f93184ec8a39"). + Return(ns, nil). + Once() + + clockMock := new(clockmock.Clock) + clock.DefaultBackend = clockMock + clockMock.On("Now").Return(now) + + cacheMock. + On("Set", ctx, "token_00000000-0000-4000-0000-00000000000065fdd16b5f62f93184ec8a39", testifymock.Anything, time.Hour*72). + Return(nil). + Once() + + mock. + On("UserUpdate", ctx, updatedUser). + Return(nil). 
+ Once() + }, + expected: Expected{ + res: &models.UserAuthResponse{ + ID: "65fdd16b5f62f93184ec8a39", + Origin: models.UserOriginLocal.String(), + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + Name: "john doe", + User: "john_doe", + Email: "john.doe@test.com", + Tenant: "00000000-0000-4000-0000-000000000000", + Role: "owner", + Token: "must ignore", + }, + lockout: 0, + mfaToken: "", + err: nil, }, - expected: Expected{nil, NewErrUserNotFound("notuser", errors.New("error", "", 0))}, }, { - description: "Successful auth login", - username: "user", - tenantID: "xxxxxx", + description: "succeeds to authenticate with a namespace (and empty preferred namespace)", + sourceIP: "127.0.0.1", + req: &requests.AuthLocalUser{ + Identifier: "john_doe", + Password: "secret", + }, requiredMocks: func() { - namespace := &models.Namespace{ - Name: "namespace", - Owner: "id", - TenantID: "xxxxxx", + user := &models.User{ + ID: "65fdd16b5f62f93184ec8a39", + Origin: models.UserOriginLocal, + Status: models.UserStatusConfirmed, + LastLogin: now, + MFA: models.UserMFA{ + Enabled: false, + }, + UserData: models.UserData{ + Username: "john_doe", + Email: "john.doe@test.com", + Name: "john doe", + }, + Password: models.UserPassword{ + Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi", + }, + Preferences: models.UserPreferences{ + PreferredNamespace: "", + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + }, + } + updatedUser := &models.User{ + ID: "65fdd16b5f62f93184ec8a39", + Origin: models.UserOriginLocal, + Status: models.UserStatusConfirmed, + LastLogin: now, + MFA: models.UserMFA{ + Enabled: false, + }, + UserData: models.UserData{ + Username: "john_doe", + Email: "john.doe@test.com", + Name: "john doe", + }, + Password: models.UserPassword{ + Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi", + }, + Preferences: models.UserPreferences{ + PreferredNamespace: "00000000-0000-4000-0000-000000000000", + 
AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + }, + } + + mock. + On("SystemGet", ctx). + Return( + &models.System{ + Authentication: &models.SystemAuthentication{ + Local: &models.SystemAuthenticationLocal{ + Enabled: true, + }, + }, + }, + nil, + ). + Once() + mock. + On("UserResolve", ctx, store.UserUsernameResolver, "john_doe"). + Return(user, nil). + Once() + cacheMock. + On("HasAccountLockout", ctx, "127.0.0.1", "65fdd16b5f62f93184ec8a39"). + Return(int64(0), 0, nil). + Once() + hashMock. + On("CompareWith", "secret", "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi"). + Return(true). + Once() + cacheMock. + On("ResetLoginAttempts", ctx, "127.0.0.1", "65fdd16b5f62f93184ec8a39"). + Return(nil). + Once() + + ns := &models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", Members: []models.Member{ { - ID: "id", + ID: "65fdd16b5f62f93184ec8a39", Role: "owner", }, }, } - mock.On("UserGetByUsername", ctx, "user").Return(&models.User{ + mock. + On("NamespaceGetPreferred", ctx, "65fdd16b5f62f93184ec8a39"). + Return(ns, nil). + Once() + + clockMock := new(clockmock.Clock) + clock.DefaultBackend = clockMock + clockMock.On("Now").Return(now) + + cacheMock. + On("Set", ctx, "token_00000000-0000-4000-0000-00000000000065fdd16b5f62f93184ec8a39", testifymock.Anything, time.Hour*72). + Return(nil). + Once() + + mock. + On("UserUpdate", ctx, updatedUser). + Return(nil). 
+ Once() + }, + expected: Expected{ + res: &models.UserAuthResponse{ + ID: "65fdd16b5f62f93184ec8a39", + Origin: models.UserOriginLocal.String(), + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + Name: "john doe", + User: "john_doe", + Email: "john.doe@test.com", + Tenant: "00000000-0000-4000-0000-000000000000", + Role: "owner", + Token: "must ignore", + }, + lockout: 0, + mfaToken: "", + err: nil, + }, + }, + { + description: "succeeds to authenticate and update non-bcypt hashes", + sourceIP: "127.0.0.1", + req: &requests.AuthLocalUser{ + Identifier: "john_doe", + Password: "secret", + }, + requiredMocks: func() { + user := &models.User{ + ID: "65fdd16b5f62f93184ec8a39", + Origin: models.UserOriginLocal, + Status: models.UserStatusConfirmed, + LastLogin: now, + MFA: models.UserMFA{ + Enabled: false, + }, UserData: models.UserData{ - Username: "user", - Name: "user", - Email: "email@email.com", - }, - ID: "id", - }, nil).Once() - mock.On("NamespaceGet", ctx, "xxxxxx").Return(namespace, nil).Once() - mock.On("GetStatusMFA", ctx, "id").Return(false, nil).Once() - }, - expected: Expected{ - userAuthResponse: &models.UserAuthResponse{ - Name: "user", - Token: "---------------token----------------", - User: "user", - Tenant: "xxxxxx", - ID: "id", - Role: "owner", - Email: "email@email.com", + Username: "john_doe", + Email: "john.doe@test.com", + Name: "john doe", + }, + Password: models.UserPassword{ + Hash: "2bb80d537b1da3e38bd30361aa855686bde0eacd7162fef6a25fe97bf527a25b", + }, + Preferences: models.UserPreferences{ + PreferredNamespace: "", + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + }, + } + + mock. + On("SystemGet", ctx). + Return( + &models.System{ + Authentication: &models.SystemAuthentication{ + Local: &models.SystemAuthenticationLocal{ + Enabled: true, + }, + }, + }, + nil, + ). + Once() + mock. + On("UserResolve", ctx, store.UserUsernameResolver, "john_doe"). + Return(user, nil). + Once() + cacheMock. 
+ On("HasAccountLockout", ctx, "127.0.0.1", "65fdd16b5f62f93184ec8a39"). + Return(int64(0), 0, nil). + Once() + hashMock. + On("CompareWith", "secret", "2bb80d537b1da3e38bd30361aa855686bde0eacd7162fef6a25fe97bf527a25b"). + Return(true). + Once() + cacheMock. + On("ResetLoginAttempts", ctx, "127.0.0.1", "65fdd16b5f62f93184ec8a39"). + Return(nil). + Once() + mock. + On("NamespaceGetPreferred", ctx, "65fdd16b5f62f93184ec8a39"). + Return(nil, errors.New("error", "layer", 0)). + Once() + + clockMock := new(clockmock.Clock) + clock.DefaultBackend = clockMock + clockMock.On("Now").Return(now) + + cacheMock. + On("HasAccountLockout", ctx, "127.0.0.1", "65fdd16b5f62f93184ec8a39"). + Return(int64(0), 0, nil). + Once() + + cacheMock. + On("Set", ctx, "token_65fdd16b5f62f93184ec8a39", testifymock.Anything, time.Hour*72). + Return(nil). + Once() + hashMock. + On("Do", "secret"). + Return("$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YVVCIa2UYuFV4OJby7Yi", nil). + Once() + + expectedUser := *user + expectedUser.LastLogin = now + expectedUser.Password.Plain = "secret" + expectedUser.Password.Hash = "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YVVCIa2UYuFV4OJby7Yi" + + mock. + On("UserUpdate", ctx, &expectedUser). + Return(nil). 
+ Once() + }, + expected: Expected{ + res: &models.UserAuthResponse{ + ID: "65fdd16b5f62f93184ec8a39", + Origin: models.UserOriginLocal.String(), + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + Name: "john doe", + User: "john_doe", + Email: "john.doe@test.com", + Tenant: "", + Token: "must ignore", }, - err: nil, + lockout: 0, + mfaToken: "", + err: nil, }, }, } + privateKey, err := rsa.GenerateKey(rand.Reader, 2048) + require.NoError(t, err) + + service := NewService(store.Store(mock), privateKey, &privateKey.PublicKey, cacheMock, clientMock) + for _, tc := range tests { t.Run(tc.description, func(t *testing.T) { - mock.ExpectedCalls = nil tc.requiredMocks() - privateKey, err := rsa.GenerateKey(rand.Reader, 2048) - assert.NoError(t, err) + res, lockout, mfaToken, err := service.AuthLocalUser(ctx, tc.req, tc.sourceIP) + // Since the resulting token is not crucial for the assertion and + // difficult to mock, it is safe to ignore this field. + if res != nil { + res.Token = "must ignore" + } - service := NewService(store.Store(mock), privateKey, &privateKey.PublicKey, storecache.NewNullCache(), clientMock, nil) - - authRes, err := service.AuthUserInfo(ctx, tc.username, tc.tenantID, "---------------token----------------") - assert.Equal(t, tc.expected.userAuthResponse, authRes) - assert.Equal(t, tc.expected.err, err) - - mock.AssertExpectations(t) + assert.Equal(t, tc.expected, Expected{res, lockout, mfaToken, err}) }) } -} -func TestAuthGetToken(t *testing.T) { - mock := new(mocks.Store) + mock.AssertExpectations(t) +} - ctx := context.TODO() +func TestCreateUserToken(t *testing.T) { + storeMock := new(mocks.Store) + cacheMock := new(mockcache.Cache) type Expected struct { - userAuthResponse *models.UserAuthResponse - err error + res *models.UserAuthResponse + err error } tests := []struct { description string - userID string - requiredMocks func() + req *requests.CreateUserToken + requiredMocks func(context.Context) expected Expected }{ { - 
description: "success when try to get a token", - userID: "user", - requiredMocks: func() { - namespace := &models.Namespace{ - Name: "namespace", - Owner: "id", - TenantID: "xxxxxx", - Members: []models.Member{ - { - ID: "memberID", - Role: "owner", + description: "fails when user is not found", + req: &requests.CreateUserToken{UserID: "000000000000000000000000", TenantID: "00000000-0000-4000-0000-000000000000"}, + requiredMocks: func(ctx context.Context) { + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(nil, store.ErrNoDocuments). + Once() + }, + expected: Expected{ + res: nil, + err: NewErrUserNotFound("000000000000000000000000", store.ErrNoDocuments), + }, + }, + { + description: "[with-tenant] fails when namespace is not found", + req: &requests.CreateUserToken{UserID: "000000000000000000000000", TenantID: "00000000-0000-4000-0000-000000000000"}, + requiredMocks: func(ctx context.Context) { + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return( + &models.User{ + ID: "000000000000000000000000", + Status: models.UserStatusConfirmed, + LastLogin: now, + MFA: models.UserMFA{ + Enabled: false, + }, + UserData: models.UserData{ + Username: "john_doe", + Email: "john.doe@test.com", + Name: "john doe", + }, + Password: models.UserPassword{ + Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi", + }, + Preferences: models.UserPreferences{ + PreferredNamespace: "", + }, + }, + nil, + ). + Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(nil, store.ErrNoDocuments). 
+ Once() + }, + expected: Expected{ + res: nil, + err: NewErrNamespaceNotFound("00000000-0000-4000-0000-000000000000", store.ErrNoDocuments), + }, + }, + { + description: "[with-tenant] fails when user is not a member of the namespace", + req: &requests.CreateUserToken{UserID: "000000000000000000000000", TenantID: "00000000-0000-4000-0000-000000000000"}, + requiredMocks: func(ctx context.Context) { + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return( + &models.User{ + ID: "000000000000000000000000", + Status: models.UserStatusConfirmed, + LastLogin: now, + MFA: models.UserMFA{ + Enabled: false, + }, + UserData: models.UserData{ + Username: "john_doe", + Email: "john.doe@test.com", + Name: "john doe", + }, + Password: models.UserPassword{ + Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi", + }, + Preferences: models.UserPreferences{ + PreferredNamespace: "", + }, + }, + nil, + ). + Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return( + &models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Members: []models.Member{}, }, + nil, + ). 
+ Once() + }, + expected: Expected{ + res: nil, + err: NewErrNamespaceMemberNotFound("000000000000000000000000", nil), + }, + }, + { + description: "[with-tenant] succeeds", + req: &requests.CreateUserToken{UserID: "000000000000000000000000", TenantID: "00000000-0000-4000-0000-000000000000"}, + requiredMocks: func(ctx context.Context) { + user := &models.User{ + ID: "000000000000000000000000", + Status: models.UserStatusConfirmed, + LastLogin: now, + MFA: models.UserMFA{ + Enabled: false, + }, + UserData: models.UserData{ + Username: "john_doe", + Email: "john.doe@test.com", + Name: "john doe", + }, + Password: models.UserPassword{ + Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi", + }, + Preferences: models.UserPreferences{ + PreferredNamespace: "", }, } - - mock.On("UserGetByID", ctx, "user", false).Return(&models.User{ + updatedUser := &models.User{ + ID: "000000000000000000000000", + Status: models.UserStatusConfirmed, + LastLogin: now, + MFA: models.UserMFA{ + Enabled: false, + }, UserData: models.UserData{ - Username: "user", - Name: "user", - Email: "email@email.com", + Username: "john_doe", + Email: "john.doe@test.com", + Name: "john doe", }, - ID: "id", - }, 1, nil).Once() - mock.On("NamespaceGetFirst", ctx, "id").Return(namespace, nil).Once() - mock.On("GetStatusMFA", ctx, "id").Return(false, nil).Once() + Password: models.UserPassword{ + Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi", + }, + Preferences: models.UserPreferences{ + PreferredNamespace: "00000000-0000-4000-0000-000000000000", + }, + } - clockMock.On("Now").Return(now).Twice() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(user, nil). + Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). 
+ Return( + &models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: "owner", + }, + }, + }, + nil, + ). + Once() + storeMock. + On("UserUpdate", ctx, updatedUser). + Return(nil). + Once() + clockMock := new(clockmock.Clock) + clock.DefaultBackend = clockMock + clockMock.On("Now").Return(now) + cacheMock. + On("Set", ctx, "token_00000000-0000-4000-0000-000000000000000000000000000000000000", testifymock.Anything, time.Hour*72). + Return(nil). + Once() + }, + expected: Expected{ + res: &models.UserAuthResponse{ + ID: "000000000000000000000000", + Name: "john doe", + User: "john_doe", + Email: "john.doe@test.com", + Tenant: "00000000-0000-4000-0000-000000000000", + Role: "owner", + Token: "must ignore", + }, + err: nil, + }, + }, + { + description: "[without-tenant] succeeds when user has a preferred namespace", + req: &requests.CreateUserToken{UserID: "000000000000000000000000", TenantID: ""}, + requiredMocks: func(ctx context.Context) { + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return( + &models.User{ + ID: "000000000000000000000000", + Status: models.UserStatusConfirmed, + LastLogin: now, + MFA: models.UserMFA{ + Enabled: false, + }, + UserData: models.UserData{ + Username: "john_doe", + Email: "john.doe@test.com", + Name: "john doe", + }, + Password: models.UserPassword{ + Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi", + }, + Preferences: models.UserPreferences{ + PreferredNamespace: "00000000-0000-4000-0000-000000000000", + }, + }, + nil, + ). + Once() + storeMock. + On("NamespaceGetPreferred", ctx, "000000000000000000000000"). + Return( + &models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: "owner", + }, + }, + }, + nil, + ). 
+ Once() + clockMock := new(clockmock.Clock) + clock.DefaultBackend = clockMock + clockMock.On("Now").Return(now) + cacheMock. + On("Set", ctx, "token_00000000-0000-4000-0000-000000000000000000000000000000000000", testifymock.Anything, time.Hour*72). + Return(nil). + Once() }, expected: Expected{ - userAuthResponse: &models.UserAuthResponse{}, - err: nil, + res: &models.UserAuthResponse{ + ID: "000000000000000000000000", + Name: "john doe", + User: "john_doe", + Email: "john.doe@test.com", + Tenant: "00000000-0000-4000-0000-000000000000", + Role: "owner", + Token: "must ignore", + }, + err: nil, + }, + }, + { + description: "[without-tenant] succeeds when user doesn't has a preferred namespace", + req: &requests.CreateUserToken{UserID: "000000000000000000000000", TenantID: ""}, + requiredMocks: func(ctx context.Context) { + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return( + &models.User{ + ID: "000000000000000000000000", + Status: models.UserStatusConfirmed, + LastLogin: now, + MFA: models.UserMFA{ + Enabled: false, + }, + UserData: models.UserData{ + Username: "john_doe", + Email: "john.doe@test.com", + Name: "john doe", + }, + Password: models.UserPassword{ + Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi", + }, + Preferences: models.UserPreferences{ + PreferredNamespace: "", + }, + }, + nil, + ). + Once() + storeMock. + On("NamespaceGetPreferred", ctx, "000000000000000000000000"). + Return(nil, store.ErrNoDocuments). + Once() + clockMock := new(clockmock.Clock) + clock.DefaultBackend = clockMock + clockMock.On("Now").Return(now) + cacheMock. + On("Set", ctx, "token_000000000000000000000000", testifymock.Anything, time.Hour*72). + Return(nil). 
+ Once() + }, + expected: Expected{ + res: &models.UserAuthResponse{ + ID: "000000000000000000000000", + Name: "john doe", + User: "john_doe", + Email: "john.doe@test.com", + Tenant: "", + Role: "", + Token: "must ignore", + }, + err: nil, }, }, } + privateKey, err := rsa.GenerateKey(rand.Reader, 2048) + require.NoError(t, err) + + s := NewService(store.Store(storeMock), privateKey, &privateKey.PublicKey, cacheMock, clientMock) + for _, tc := range tests { t.Run(tc.description, func(t *testing.T) { - mock.ExpectedCalls = nil - tc.requiredMocks() + ctx := context.Background() + tc.requiredMocks(ctx) + + res, err := s.CreateUserToken(ctx, tc.req) + // Since the resulting token is not crucial for the assertion and + // difficult to mock, it is safe to ignore this field. + if res != nil { + res.Token = "must ignore" + } + + assert.Equal(t, tc.expected, Expected{res, err}) + }) + } + + storeMock.AssertExpectations(t) +} - privateKey, err := rsa.GenerateKey(rand.Reader, 2048) - assert.NoError(t, err) +func TestAuthAPIKey(t *testing.T) { + type Expected struct { + apiKey *models.APIKey + err error + } - service := NewService(mock, privateKey, &privateKey.PublicKey, storecache.NewNullCache(), clientMock, nil) + storeMock := new(mocks.Store) + cacheMock := new(mockcache.Cache) - authRes, err := service.AuthGetToken(ctx, tc.userID, false) - assert.NotNil(t, authRes) - assert.Equal(t, tc.expected.err, err) + tests := []struct { + description string + key string + requiredMocks func(context.Context) + expected Expected + }{ + { + description: "fails when could not get the api key from store", + key: "00000000-0000-4000-0000-000000000000", + requiredMocks: func(ctx context.Context) { + cacheMock. + On("Get", ctx, "api-key={00000000-0000-4000-0000-000000000000}", testifymock.Anything). + Return(nil). + Once() + keySum := sha256.Sum256([]byte("00000000-0000-4000-0000-000000000000")) + hashedKey := hex.EncodeToString(keySum[:]) + storeMock. 
+ On("APIKeyResolve", ctx, store.APIKeyIDResolver, hashedKey). + Return(nil, errors.New("error", "", 0)). + Once() + }, + expected: Expected{ + apiKey: nil, + err: NewErrAPIKeyNotFound("", errors.New("error", "", 0)), + }, + }, + { + description: "fails when the api key is not valid", + key: "00000000-0000-4000-0000-000000000000", + requiredMocks: func(ctx context.Context) { + cacheMock. + On("Get", ctx, "api-key={00000000-0000-4000-0000-000000000000}", testifymock.Anything). + Return(nil). + Once() + keySum := sha256.Sum256([]byte("00000000-0000-4000-0000-000000000000")) + hashedKey := hex.EncodeToString(keySum[:]) + storeMock. + On("APIKeyResolve", ctx, store.APIKeyIDResolver, hashedKey). + Return( + &models.APIKey{ + Name: "dev", + ExpiresIn: time.Date(2000, 0o1, 0o1, 12, 0o0, 0o0, 0o0, time.UTC).Unix(), + }, + nil, + ). + Once() + }, + expected: Expected{ + apiKey: nil, + err: NewErrAPIKeyInvalid("dev"), + }, + }, + { + description: "succeeds", + key: "00000000-0000-4000-0000-000000000000", + requiredMocks: func(ctx context.Context) { + cacheMock. + On("Get", ctx, "api-key={00000000-0000-4000-0000-000000000000}", testifymock.Anything). + Return(nil). + Once() + keySum := sha256.Sum256([]byte("00000000-0000-4000-0000-000000000000")) + hashedKey := hex.EncodeToString(keySum[:]) + storeMock. + On("APIKeyResolve", ctx, store.APIKeyIDResolver, hashedKey). + Return( + &models.APIKey{ + Name: "dev", + ExpiresIn: time.Date(3000, 0o1, 0o1, 12, 0o0, 0o0, 0o0, time.UTC).Unix(), + }, + nil, + ). + Once() + cacheMock. + On("Set", ctx, "api-key={00000000-0000-4000-0000-000000000000}", &models.APIKey{Name: "dev", ExpiresIn: time.Date(3000, 0o1, 0o1, 12, 0o0, 0o0, 0o0, time.UTC).Unix()}, 2*time.Minute). + Return(nil). 
+ Once() + }, + expected: Expected{ + apiKey: &models.APIKey{ + Name: "dev", + ExpiresIn: time.Date(3000, 0o1, 0o1, 12, 0o0, 0o0, 0o0, time.UTC).Unix(), + }, + err: nil, + }, + }, + } + + privKey, err := rsa.GenerateKey(rand.Reader, 2048) + require.NoError(t, err) + service := NewService(storeMock, privKey, &privKey.PublicKey, cacheMock, clientMock) - mock.AssertExpectations(t) + for _, tc := range tests { + t.Run(tc.description, func(t *testing.T) { + ctx := context.Background() + tc.requiredMocks(ctx) + apiKey, err := service.AuthAPIKey(ctx, tc.key) + require.Equal(t, tc.expected, Expected{apiKey, err}) }) } + + storeMock.AssertExpectations(t) } diff --git a/api/services/billing.go b/api/services/billing.go index b961a281df8..f75e31e0d5b 100644 --- a/api/services/billing.go +++ b/api/services/billing.go @@ -1,17 +1,20 @@ package services import ( + "context" + "errors" + req "github.com/shellhub-io/shellhub/pkg/api/internalclient" ) type BillingInterface interface { - BillingEvaluate(req.Client, string) (bool, error) - BillingReport(req.Client, string, string) error + BillingEvaluate(ctx context.Context, client req.Client, tenant string) (bool, error) + BillingReport(ctx context.Context, client req.Client, tenant string, action string) error } // BillingEvaluate evaluate in the billing service if the namespace can create accept more devices. 
-func (s *service) BillingEvaluate(client req.Client, tenant string) (bool, error) { - evaluation, _, err := client.BillingEvaluate(tenant) +func (s *service) BillingEvaluate(ctx context.Context, client req.Client, tenant string) (bool, error) { + evaluation, err := client.BillingEvaluate(ctx, tenant) if err != nil { return false, ErrEvaluate } @@ -24,18 +27,20 @@ const ( ReportNamespaceDelete = "namespace_delete" ) -func (s *service) BillingReport(client req.Client, tenant string, action string) error { - status, err := client.BillingReport(tenant, action) - if err != nil { - return err - } +func (s *service) BillingReport(ctx context.Context, client req.Client, tenant string, action string) error { + if err := client.BillingReport(ctx, tenant, action); err != nil { + var e *req.Error + if ok := errors.As(err, &e); !ok { + return ErrReport + } - switch status { - case 200: - return nil - case 402: - return ErrPaymentRequired - default: - return ErrReport + switch e.Code { + case 402: + return ErrPaymentRequired + default: + return ErrReport + } } + + return nil } diff --git a/api/services/billing_test.go b/api/services/billing_test.go index 3299b4a8584..532100863e7 100644 --- a/api/services/billing_test.go +++ b/api/services/billing_test.go @@ -1,14 +1,17 @@ package services import ( + "context" "errors" "testing" "github.com/shellhub-io/shellhub/api/store" "github.com/shellhub-io/shellhub/api/store/mocks" + req "github.com/shellhub-io/shellhub/pkg/api/internalclient" "github.com/shellhub-io/shellhub/pkg/cache" "github.com/shellhub-io/shellhub/pkg/models" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" ) func TestBillingEvaluate(t *testing.T) { @@ -17,7 +20,7 @@ func TestBillingEvaluate(t *testing.T) { err error } - mock := new(mocks.Store) + storeMock := new(mocks.Store) cases := []struct { description string @@ -29,7 +32,7 @@ func TestBillingEvaluate(t *testing.T) { description: "succeeds when client method succeeds", tenant: 
"00000000-0000-0000-0000-000000000000", requiredMocks: func() { - clientMock.On("BillingEvaluate", "00000000-0000-0000-0000-000000000000").Return(&models.BillingEvaluation{CanAccept: true, CanConnect: true}, 0, nil).Once() + clientMock.On("BillingEvaluate", mock.Anything, "00000000-0000-0000-0000-000000000000").Return(&models.BillingEvaluation{CanAccept: true, CanConnect: true}, nil).Once() }, expected: Expected{canAccept: true, err: nil}, }, @@ -37,7 +40,7 @@ func TestBillingEvaluate(t *testing.T) { description: "fails when client method fails", tenant: "00000000-0000-0000-0000-000000000000", requiredMocks: func() { - clientMock.On("BillingEvaluate", "00000000-0000-0000-0000-000000000000").Return(&models.BillingEvaluation{CanAccept: true, CanConnect: true}, 0, ErrEvaluate).Once() + clientMock.On("BillingEvaluate", mock.Anything, "00000000-0000-0000-0000-000000000000").Return(&models.BillingEvaluation{CanAccept: true, CanConnect: true}, ErrEvaluate).Once() }, expected: Expected{canAccept: false, err: ErrEvaluate}, }, @@ -47,17 +50,17 @@ func TestBillingEvaluate(t *testing.T) { t.Run(tc.description, func(t *testing.T) { tc.requiredMocks() - service := NewService(store.Store(mock), privateKey, publicKey, cache.NewNullCache(), clientMock, nil) - canAccept, err := service.BillingEvaluate(clientMock, tc.tenant) + service := NewService(store.Store(storeMock), privateKey, publicKey, cache.NewNullCache(), clientMock) + canAccept, err := service.BillingEvaluate(context.Background(), clientMock, tc.tenant) assert.Equal(t, tc.expected, Expected{canAccept: canAccept, err: err}) }) } - mock.AssertExpectations(t) + storeMock.AssertExpectations(t) } func TestBillingReport(t *testing.T) { - mock := new(mocks.Store) + storeMock := new(mocks.Store) cases := []struct { description string @@ -71,7 +74,7 @@ func TestBillingReport(t *testing.T) { tenant: "00000000-0000-0000-0000-000000000000", action: "device_accept", requiredMocks: func() { - clientMock.On("BillingReport", 
"00000000-0000-0000-0000-000000000000", "device_accept").Return(200, nil).Once() + clientMock.On("BillingReport", mock.Anything, "00000000-0000-0000-0000-000000000000", "device_accept").Return(nil).Once() }, expected: nil, }, @@ -80,7 +83,7 @@ func TestBillingReport(t *testing.T) { tenant: "00000000-0000-0000-0000-000000000000", action: "device_accept", requiredMocks: func() { - clientMock.On("BillingReport", "00000000-0000-0000-0000-000000000000", "device_accept").Return(402, nil).Once() + clientMock.On("BillingReport", mock.Anything, "00000000-0000-0000-0000-000000000000", "device_accept").Return(&req.Error{Code: 402, Message: ""}).Once() }, expected: ErrPaymentRequired, }, @@ -89,7 +92,7 @@ func TestBillingReport(t *testing.T) { tenant: "00000000-0000-0000-0000-000000000000", action: "device_accept", requiredMocks: func() { - clientMock.On("BillingReport", "00000000-0000-0000-0000-000000000000", "device_accept").Return(500, nil).Once() + clientMock.On("BillingReport", mock.Anything, "00000000-0000-0000-0000-000000000000", "device_accept").Return(&req.Error{Code: 500, Message: ""}).Once() }, expected: ErrReport, }, @@ -98,9 +101,9 @@ func TestBillingReport(t *testing.T) { tenant: "00000000-0000-0000-0000-000000000000", action: "device_accept", requiredMocks: func() { - clientMock.On("BillingReport", "00000000-0000-0000-0000-000000000000", "device_accept").Return(0, errors.New("error")).Once() + clientMock.On("BillingReport", mock.Anything, "00000000-0000-0000-0000-000000000000", "device_accept").Return(errors.New("error")).Once() }, - expected: errors.New("error"), + expected: ErrReport, }, } @@ -108,11 +111,11 @@ func TestBillingReport(t *testing.T) { t.Run(tc.description, func(t *testing.T) { tc.requiredMocks() - service := NewService(store.Store(mock), privateKey, publicKey, cache.NewNullCache(), clientMock, nil) - err := service.BillingReport(clientMock, tc.tenant, tc.action) + service := NewService(store.Store(storeMock), privateKey, publicKey, 
cache.NewNullCache(), clientMock) + err := service.BillingReport(context.Background(), clientMock, tc.tenant, tc.action) assert.Equal(t, tc.expected, err) }) } - mock.AssertExpectations(t) + storeMock.AssertExpectations(t) } diff --git a/api/services/device.go b/api/services/device.go index b2d4b467826..25f9e57eafd 100644 --- a/api/services/device.go +++ b/api/services/device.go @@ -2,72 +2,98 @@ package services import ( "context" - "fmt" - "net" + "errors" "strings" - "time" "github.com/shellhub-io/shellhub/api/store" - req "github.com/shellhub-io/shellhub/pkg/api/internalclient" - "github.com/shellhub-io/shellhub/pkg/api/paginator" + "github.com/shellhub-io/shellhub/pkg/api/requests" "github.com/shellhub-io/shellhub/pkg/clock" "github.com/shellhub-io/shellhub/pkg/envs" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/shellhub-io/shellhub/pkg/validator" - "github.com/sirupsen/logrus" + log "github.com/sirupsen/logrus" ) const StatusAccepted = "accepted" type DeviceService interface { - ListDevices(ctx context.Context, tenant string, pagination paginator.Query, filter []models.Filter, status models.DeviceStatus, sort, order string) ([]models.Device, int, error) + ListDevices(ctx context.Context, req *requests.DeviceList) ([]models.Device, int, error) GetDevice(ctx context.Context, uid models.UID) (*models.Device, error) - GetDeviceByPublicURLAddress(ctx context.Context, address string) (*models.Device, error) + + // ResolveDevice attempts to resolve a device by searching for either its UID or hostname. When both are provided, + // UID takes precedence over hostname. The search is scoped to the namespace's tenant ID to limit results. + // + // It returns the resolved device and any error encountered. + ResolveDevice(ctx context.Context, req *requests.ResolveDevice) (*models.Device, error) + DeleteDevice(ctx context.Context, uid models.UID, tenant string) error + + // RenameDevice renames the specified device. 
+ // This method is deprecated, use [DeviceService#UpdateDevice] instead. RenameDevice(ctx context.Context, uid models.UID, name, tenant string) error + LookupDevice(ctx context.Context, namespace, name string) (*models.Device, error) - OffineDevice(ctx context.Context, uid models.UID, online bool) error - UpdateDeviceStatus(ctx context.Context, tenant string, uid models.UID, status models.DeviceStatus) error - SetDevicePosition(ctx context.Context, uid models.UID, ip string) error - DeviceHeartbeat(ctx context.Context, uid models.UID) error - UpdateDevice(ctx context.Context, tenant string, uid models.UID, name *string, publicURL *bool) error + OfflineDevice(ctx context.Context, uid models.UID) error + + UpdateDevice(ctx context.Context, req *requests.DeviceUpdate) error + // UpdateDeviceStatus updates a device's status. Devices that are already accepted cannot change their status. + // + // When accepting, if a device with the same MAC address is already accepted within the same namespace, it + // merges these devices unless a third device with the same hostname already exists and has a different MAC + // address. The merge transfers all sessions from the old device to the new one, renames the new device to + // preserve the old device's identity, and deletes the old device. Also, if another accepted device already + // uses the same hostname but has a different MAC address, the operation fails. 
+ // + // Environment-specific Acceptance Rules: + // - Community/Enterprise: Only checks the namespace's device limit + // - Cloud (billing active): Reports device acceptance to billing service for quota/payment validation + // - Cloud (billing inactive): Checks if the device is removed and evaluates namespace capabilities: + // * If device was previously removed: removes from removed list, then evaluates billing + // * If device was not removed: counts total removed devices and checks against limits, then evaluates billing + // * Billing evaluation determines if the namespace can accept more devices based on subscription status + // + // All operations are performed within a database transaction to ensure consistency during device merging + // and counter updates. + UpdateDeviceStatus(ctx context.Context, req *requests.DeviceUpdateStatus) error } -func (s *service) ListDevices(ctx context.Context, tenant string, pagination paginator.Query, filter []models.Filter, status models.DeviceStatus, sort, order string) ([]models.Device, int, error) { - switch status { - case models.DeviceStatusPending, models.DeviceStatusRejected: - ns, err := s.store.NamespaceGet(ctx, tenant) - if err != nil { - return nil, 0, NewErrNamespaceNotFound(tenant, err) - } +func (s *service) ListDevices(ctx context.Context, req *requests.DeviceList) ([]models.Device, int, error) { + opts := []store.QueryOption{} - count, err := s.store.DeviceRemovedCount(ctx, ns.TenantID) - if err != nil { - return nil, 0, NewErrDeviceRemovedCount(err) - } + if req.DeviceStatus != "" { + opts = append(opts, s.store.Options().WithDeviceStatus(req.DeviceStatus)) + } - if ns.HasMaxDevices() && int64(ns.DevicesCount)+count >= int64(ns.MaxDevices) { - return s.store.DeviceList(ctx, pagination, filter, status, sort, order, store.DeviceListModeMaxDeviceReached) - } - case models.DeviceStatusRemoved: - removed, count, err := s.store.DeviceRemovedList(ctx, tenant, pagination, filter, sort, order) + if req.TenantID != 
"" { + opts = append(opts, s.store.Options().InNamespace(req.TenantID)) + } + + if req.Sorter.By == "" { + req.Sorter.By = "last_seen" + } + + opts = append(opts, s.store.Options().Match(&req.Filters), s.store.Options().Sort(&req.Sorter), s.store.Options().Paginate(&req.Paginator)) + + if req.DeviceStatus == models.DeviceStatusRemoved { + return s.store.DeviceList(ctx, store.DeviceAcceptableFromRemoved, opts...) + } + + if req.TenantID != "" { + ns, err := s.store.NamespaceResolve(ctx, store.NamespaceTenantIDResolver, req.TenantID) if err != nil { - return nil, 0, err + return nil, 0, NewErrNamespaceNotFound(req.TenantID, err) } - devices := make([]models.Device, 0, len(removed)) - for _, device := range removed { - devices = append(devices, *device.Device) + // Unified logic: if limit reached, prevent accepting new devices + if ns.HasMaxDevices() && ns.HasMaxDevicesReached() { + return s.store.DeviceList(ctx, store.DeviceAcceptableAsFalse, opts...) } - - return devices, count, nil } - return s.store.DeviceList(ctx, pagination, filter, status, sort, order, store.DeviceListModeDefault) + return s.store.DeviceList(ctx, store.DeviceAcceptableIfNotAccepted, opts...) 
} func (s *service) GetDevice(ctx context.Context, uid models.UID) (*models.Device, error) { - device, err := s.store.DeviceGet(ctx, uid) + device, err := s.store.DeviceResolve(ctx, store.DeviceUIDResolver, string(uid)) if err != nil { return nil, NewErrDeviceNotFound(uid, err) } @@ -75,10 +101,23 @@ func (s *service) GetDevice(ctx context.Context, uid models.UID) (*models.Device return device, nil } -func (s *service) GetDeviceByPublicURLAddress(ctx context.Context, address string) (*models.Device, error) { - device, err := s.store.DeviceGetByPublicURLAddress(ctx, address) +func (s *service) ResolveDevice(ctx context.Context, req *requests.ResolveDevice) (*models.Device, error) { + n, err := s.store.NamespaceResolve(ctx, store.NamespaceTenantIDResolver, req.TenantID) if err != nil { - return nil, NewErrDeviceNotFound(models.UID(address), err) + return nil, NewErrNamespaceNotFound(req.TenantID, err) + } + + var device *models.Device + switch { + case req.UID != "": + device, err = s.store.DeviceResolve(ctx, store.DeviceUIDResolver, req.UID, s.store.Options().InNamespace(n.TenantID)) + case req.Hostname != "": + device, err = s.store.DeviceResolve(ctx, store.DeviceHostnameResolver, req.Hostname, s.store.Options().InNamespace(n.TenantID)) + } + + if err != nil { + // TODO: refactor this error to accept a string instead of models.UID + return nil, NewErrDeviceNotFound(models.UID(""), err) } return device, nil @@ -93,70 +132,56 @@ func (s *service) GetDeviceByPublicURLAddress(ctx context.Context, address strin // NewErrNamespaceNotFound(tenant, err), if the usage cannot be reported, ErrReport or if the store function that // delete the device fails. 
func (s *service) DeleteDevice(ctx context.Context, uid models.UID, tenant string) error { - device, err := s.store.DeviceGetByUID(ctx, uid, tenant) + device, err := s.store.DeviceResolve(ctx, store.DeviceUIDResolver, string(uid), s.store.Options().InNamespace(tenant)) if err != nil { return NewErrDeviceNotFound(uid, err) } - ns, err := s.store.NamespaceGet(ctx, tenant) - if err != nil { - return NewErrNamespaceNotFound(tenant, err) - } + // NOTE: Always soft-delete accepted devices for audit purposes. + // Pending/Rejected devices can be hard-deleted as they don't need audit trail. + if device.Status == models.DeviceStatusAccepted { + now := clock.Now() + + deviceCopy := *device + deviceCopy.Status = models.DeviceStatusRemoved + deviceCopy.RemovedAt = &now + if err := s.store.DeviceUpdate(ctx, &deviceCopy); err != nil { + return err + } - // If the namespace has a limit of devices, we change the device's slot status to removed. - // This way, we can keep track of the number of devices that were removed from the namespace and void the device - // switching. 
- if envs.IsCloud() && envs.HasBilling() && !ns.Billing.IsActive() { - if err := s.store.DeviceRemovedInsert(ctx, tenant, device); err != nil { - return NewErrDeviceRemovedInsert(err) + if err := s.store.NamespaceIncrementDeviceCount(ctx, tenant, models.DeviceStatusRemoved, 1); err != nil { + return err + } + } else { + // Hard-delete pending/rejected devices (no audit needed) + if err := s.store.DeviceDelete(ctx, device); err != nil { + return err } } - return s.store.DeviceDelete(ctx, uid) + if err := s.store.NamespaceIncrementDeviceCount(ctx, tenant, device.Status, -1); err != nil { //nolint:revive + return err + } + + return nil } func (s *service) RenameDevice(ctx context.Context, uid models.UID, name, tenant string) error { - device, err := s.store.DeviceGetByUID(ctx, uid, tenant) + device, err := s.store.DeviceResolve(ctx, store.DeviceUIDResolver, string(uid), s.store.Options().InNamespace(tenant)) if err != nil { return NewErrDeviceNotFound(uid, err) } - updatedDevice := &models.Device{ - UID: device.UID, - Name: strings.ToLower(name), - Identity: device.Identity, - Info: device.Info, - PublicKey: device.PublicKey, - TenantID: device.TenantID, - LastSeen: device.LastSeen, - Online: device.Online, - Namespace: device.Namespace, - Status: device.Status, - CreatedAt: time.Time{}, - RemoteAddr: "", - Position: &models.DevicePosition{}, - Tags: []string{}, - PublicURL: false, - } - - if ok, err := s.validator.Struct(updatedDevice); !ok || err != nil { - return NewErrDeviceInvalid(nil, err) - } - - if device.Name == updatedDevice.Name { + if strings.EqualFold(device.Name, name) { return nil } - otherDevice, err := s.store.DeviceGetByName(ctx, updatedDevice.Name, tenant, models.DeviceStatusAccepted) - if err != nil && err != store.ErrNoDocuments { - return NewErrDeviceNotFound(models.UID(updatedDevice.UID), err) - } - - if otherDevice != nil { - return NewErrDeviceDuplicated(otherDevice.Name, err) + device.Name = strings.ToLower(name) + if err := 
s.store.DeviceUpdate(ctx, device); err != nil { // nolint:revive + return err } - return s.store.DeviceRename(ctx, uid, name) + return nil } // LookupDevice looks for a device in a namespace. @@ -164,213 +189,249 @@ func (s *service) RenameDevice(ctx context.Context, uid models.UID, name, tenant // It receives a context, used to "control" the request flow and, the namespace name from a models.Namespace and a // device name from models.Device. func (s *service) LookupDevice(ctx context.Context, namespace, name string) (*models.Device, error) { - device, err := s.store.DeviceLookup(ctx, namespace, name) - if err != nil || device == nil { - return nil, NewErrDeviceLookupNotFound(namespace, name, err) + n, err := s.store.NamespaceResolve(ctx, store.NamespaceNameResolver, strings.ToLower(namespace)) + if err != nil { + return nil, NewErrNamespaceNotFound(namespace, err) } - return device, nil -} + opts := []store.QueryOption{ + s.store.Options().InNamespace(n.TenantID), + s.store.Options().WithDeviceStatus(models.DeviceStatusAccepted), + } -func (s *service) OffineDevice(ctx context.Context, uid models.UID, online bool) error { - err := s.store.DeviceSetOnline(ctx, uid, clock.Now(), online) - if err == store.ErrNoDocuments { - return NewErrDeviceNotFound(uid, err) + device, err := s.store.DeviceResolve(ctx, store.DeviceHostnameResolver, name, opts...) + if err != nil || device == nil { + return nil, NewErrDeviceNotFound(models.UID(name), err) } - return err + return device, nil } -// UpdateDeviceStatus updates the device status. 
-func (s *service) UpdateDeviceStatus(ctx context.Context, tenant string, uid models.UID, status models.DeviceStatus) error { - namespace, err := s.store.NamespaceGet(ctx, tenant) - if err != nil { - return NewErrNamespaceNotFound(tenant, err) - } - - device, err := s.store.DeviceGetByUID(ctx, uid, tenant) - if err != nil { +func (s *service) OfflineDevice(ctx context.Context, uid models.UID) error { + device, err := s.store.DeviceResolve(ctx, store.DeviceUIDResolver, string(uid)) + if err != nil || device == nil { return NewErrDeviceNotFound(uid, err) } - if device.Status == models.DeviceStatusAccepted { - return NewErrDeviceStatusAccepted(nil) - } + now := clock.Now() + device.DisconnectedAt = &now + if err := s.store.DeviceUpdate(ctx, device); err != nil { // nolint:revive + if errors.Is(err, store.ErrNoDocuments) { + return NewErrDeviceNotFound(uid, err) + } - // NOTICE: when the device is intended to be rejected or in pending status, we don't check for duplications as it - // is not going to be considered for connections. - if status == models.DeviceStatusPending || status == models.DeviceStatusRejected { - return s.store.DeviceUpdateStatus(ctx, uid, status) + return err } - // NOTICE: when the intended status is not accepted, we return an error because these status are not allowed - // to be set by the user. - if status != models.DeviceStatusAccepted { - return NewErrDeviceStatusInvalid(string(status), nil) - } + return nil +} - // NOTICE: when there is an already accepted device with the same MAC address, we need to update the device UID - // transfer the sessions and delete the old device. 
- sameMacDev, err := s.store.DeviceGetByMac(ctx, device.Identity.MAC, device.TenantID, models.DeviceStatusAccepted) - if err != nil && err != store.ErrNoDocuments { - return NewErrDeviceNotFound(models.UID(device.UID), err) - } +func (s *service) UpdateDeviceStatus(ctx context.Context, req *requests.DeviceUpdateStatus) error { + return s.store.WithTransaction(ctx, s.updateDeviceStatus(req)) +} - // TODO: move this logic to store's transactions. - if sameMacDev != nil && sameMacDev.UID != device.UID { - if sameName, err := s.store.DeviceGetByName(ctx, device.Name, device.TenantID, models.DeviceStatusAccepted); sameName != nil && sameName.Identity.MAC != device.Identity.MAC { - return NewErrDeviceDuplicated(device.Name, err) +func (s *service) updateDeviceStatus(req *requests.DeviceUpdateStatus) store.TransactionCb { + return func(ctx context.Context) error { + namespace, err := s.store.NamespaceResolve(ctx, store.NamespaceTenantIDResolver, req.TenantID) + if err != nil { + return NewErrNamespaceNotFound(req.TenantID, err) } - if err := s.store.SessionUpdateDeviceUID(ctx, models.UID(sameMacDev.UID), models.UID(device.UID)); err != nil && err != store.ErrNoDocuments { - return err + device, err := s.store.DeviceResolve(ctx, store.DeviceUIDResolver, req.UID, s.store.Options().InNamespace(namespace.TenantID)) + if err != nil { + return NewErrDeviceNotFound(models.UID(req.UID), err) } - if err := s.store.DeviceRename(ctx, models.UID(device.UID), sameMacDev.Name); err != nil { - return err - } + if device.Status == models.DeviceStatusAccepted { + log.WithFields(log.Fields{"device_uid": device.UID}). 
+ Warn("cannot change status - device already accepted") - if err := s.store.DeviceDelete(ctx, models.UID(sameMacDev.UID)); err != nil { - return err + return NewErrDeviceStatusAccepted(nil) } - return s.store.DeviceUpdateStatus(ctx, uid, status) - } + oldStatus := device.Status + newStatus := models.DeviceStatus(req.Status) - if sameName, err := s.store.DeviceGetByName(ctx, device.Name, device.TenantID, models.DeviceStatusAccepted); sameName != nil { - return NewErrDeviceDuplicated(device.Name, err) - } + if newStatus == device.Status { + return nil + } - if status != models.DeviceStatusAccepted { - return s.store.DeviceUpdateStatus(ctx, uid, status) - } + if newStatus == models.DeviceStatusAccepted { + opts := []store.QueryOption{s.store.Options().WithDeviceStatus(models.DeviceStatusAccepted), s.store.Options().InNamespace(namespace.TenantID)} + existingMacDevice, err := s.store.DeviceResolve(ctx, store.DeviceMACResolver, device.Identity.MAC, opts...) + if err != nil && !errors.Is(err, store.ErrNoDocuments) { + log.WithError(err). + WithFields(log.Fields{"mac": device.Identity.MAC}). + Error("failed to retrieve device using MAC") - switch { - case envs.IsCommunity(), envs.IsEnterprise(): - if namespace.HasMaxDevices() && namespace.HasMaxDevicesReached() { - return NewErrDeviceMaxDevicesReached(namespace.MaxDevices) - } - case envs.IsCloud(): - if namespace.Billing.IsActive() { - if err := s.BillingReport(s.client.(req.Client), namespace.TenantID, ReportDeviceAccept); err != nil { - return NewErrBillingReportNamespaceDelete(err) - } - } else { - // TODO: this strategy that stores the removed devices in the database can be simplified. 
- removed, err := s.store.DeviceRemovedGet(ctx, tenant, uid) - if err != nil && err != store.ErrNoDocuments { - return NewErrDeviceRemovedGet(err) + return err } - if removed != nil { - if err := s.store.DeviceRemovedDelete(ctx, tenant, uid); err != nil { - return NewErrDeviceRemovedDelete(err) + if existingMacDevice != nil && existingMacDevice.UID != device.UID { + existingNameDevice, err := s.store.DeviceResolve(ctx, store.DeviceHostnameResolver, device.Name, opts...) + if err != nil && !errors.Is(err, store.ErrNoDocuments) { + log.WithError(err). + WithFields(log.Fields{"name": device.Name}). + Error("failed to retrieve device using name") + + return err + } + + if existingNameDevice != nil && existingNameDevice.Identity.MAC != device.Identity.MAC { + log.WithFields(log.Fields{"device_uid": device.UID, "device_mac": device.Identity.MAC, "conflicting_device_name": device.Name}). + Error("device merge blocked - hostname already used by device with different MAC address") + + return NewErrDeviceDuplicated(device.Name, nil) + } + + if err := s.mergeDevice(ctx, namespace.TenantID, existingMacDevice, device); err != nil { + log.WithError(err). + WithFields(log.Fields{"device_uid": device.UID, "existing_device_uid": existingMacDevice.UID, "device_mac": device.Identity.MAC}). + Error("device merge operation failed") + + return err } } else { - count, err := s.store.DeviceRemovedCount(ctx, tenant) - if err != nil { - return NewErrDeviceRemovedCount(err) + existingDevice, err := s.store.DeviceResolve(ctx, store.DeviceHostnameResolver, device.Name, opts...) + if err != nil && !errors.Is(err, store.ErrNoDocuments) { + log.WithError(err). + WithFields(log.Fields{"name": device.Name}). 
+ Error("failed to retrieve device using name") + + return err } - if namespace.HasMaxDevices() && int64(namespace.DevicesCount)+count >= int64(namespace.MaxDevices) { - return NewErrDeviceRemovedFull(namespace.MaxDevices, nil) + if existingDevice != nil { + log.WithFields(log.Fields{"device_uid": device.UID, "conflicting_device_name": device.Name}). + Error("device acceptance blocked - hostname already used by another device") + + return NewErrDeviceDuplicated(device.Name, nil) } - } - ok, err := s.BillingEvaluate(s.client.(req.Client), namespace.TenantID) - if err != nil { - return NewErrBillingEvaluate(err) + if envs.IsCloud() { + hasBillingActive := namespace.Billing != nil && namespace.Billing.IsActive() + + if !hasBillingActive && namespace.HasMaxDevices() && namespace.HasMaxDevicesReached() { + log.WithError(err).WithFields(log.Fields{"device_uid": device.UID}). + Error("namespace's limit reached - cannot accept another device") + + return NewErrDeviceLimit(namespace.MaxDevices, nil) + } + + if err := s.handleCloudBilling(ctx, namespace); err != nil { + log.WithError(err).WithFields(log.Fields{"device_uid": device.UID, "billing_active": namespace.Billing.IsActive()}). + Error("billing validation failed") + + return err + } + } else { + if namespace.HasMaxDevices() && namespace.HasMaxDevicesReached() { + return NewErrDeviceMaxDevicesReached(namespace.MaxDevices) + } + } } + } - if !ok { - return ErrDeviceLimit + device.Status = newStatus + device.StatusUpdatedAt = clock.Now() + if err := s.store.DeviceUpdate(ctx, device); err != nil { + return err + } + + for status, count := range map[models.DeviceStatus]int64{oldStatus: -1, newStatus: 1} { + if err := s.store.NamespaceIncrementDeviceCount(ctx, namespace.TenantID, status, count); err != nil { + return err } } - } - return s.store.DeviceUpdateStatus(ctx, uid, status) + return nil + } } -// SetDevicePosition sets the position to a device from its IP. 
-func (s *service) SetDevicePosition(ctx context.Context, uid models.UID, ip string) error { - ipParsed := net.ParseIP(ip) - position, err := s.locator.GetPosition(ipParsed) +func (s *service) UpdateDevice(ctx context.Context, req *requests.DeviceUpdate) error { + device, err := s.store.DeviceResolve(ctx, store.DeviceUIDResolver, req.UID, s.store.Options().InNamespace(req.TenantID)) if err != nil { - logrus. - WithError(err). - WithFields(logrus.Fields{ - "uid": uid, - "ip": ip, - }).Error("Failed to get device's position") + return NewErrDeviceNotFound(models.UID(req.UID), err) } - devicePosition := models.DevicePosition{ - Longitude: position.Longitude, - Latitude: position.Latitude, + conflictsTarget := &models.DeviceConflicts{Name: req.Name} + conflictsTarget.Distinct(device) + if _, has, err := s.store.DeviceConflicts(ctx, conflictsTarget); err != nil || has { + return NewErrDeviceDuplicated(req.Name, err) } - err = s.store.DeviceSetPosition(ctx, uid, devicePosition) - if err != nil { - logrus. - WithError(err). - WithFields(logrus.Fields{ - "uid": uid, - "ip": ip, - }).Error("Failed to set device's position to database") + if req.Name != "" && !strings.EqualFold(req.Name, device.Name) { + device.Name = strings.ToLower(req.Name) + } + if err := s.store.DeviceUpdate(ctx, device); err != nil { // nolint:revive return err } - logrus.WithFields(logrus.Fields{ - "uid": uid, - "ip": ip, - "position": position, - }).Debug("Success to set device's position") return nil } -func (s *service) DeviceHeartbeat(ctx context.Context, uid models.UID) error { - if err := s.store.DeviceSetOnline(ctx, uid, clock.Now(), true); err != nil { - return NewErrDeviceNotFound(uid, err) +// mergeDevice merges an old device into a new device. It transfers all sessions from the old device to the new one and +// renames the new device to preserve the old device's identity. The old device is then deleted and the namespace's device count is decremented. 
+func (s *service) mergeDevice(ctx context.Context, tenantID string, oldDevice *models.Device, newDevice *models.Device) error { + logFields := log.Fields{"tenant_id": tenantID, "old_device_uid": oldDevice.UID, "new_device_uid": newDevice.UID} + + log.WithFields(logFields).Debug("transferring tunnels from old device to new device") + if err := s.store.TunnelUpdateDeviceUID(ctx, tenantID, oldDevice.UID, newDevice.UID); err != nil { + log.WithError(err).WithFields(logFields).Error("failed to transfer tunnels") + + return err } - return nil -} + log.WithFields(logFields).Debug("transferring sessions from old device to new device") + if err := s.store.SessionUpdateDeviceUID(ctx, models.UID(oldDevice.UID), models.UID(newDevice.UID)); err != nil && !errors.Is(err, store.ErrNoDocuments) { + log.WithError(err).WithFields(logFields).Error("failed to transfer sessions") -func (s *service) UpdateDevice(ctx context.Context, tenant string, uid models.UID, name *string, publicURL *bool) error { - device, err := s.store.DeviceGetByUID(ctx, uid, tenant) - if err != nil { - return NewErrDeviceNotFound(uid, err) + return err } - if name != nil { - *name = strings.ToLower(*name) + log.WithFields(logFields).Debug("updating new device name to preserve old device identity") + newDevice.Name = oldDevice.Name + if err := s.store.DeviceUpdate(ctx, newDevice); err != nil { + log.WithError(err).WithFields(logFields).Error("failed to update new device name") - if device.Name == *name { - return nil - } + return err + } - if ok, err := s.validator.Var(*name, validator.DeviceNameTag); err != nil || !ok { - return NewErrDeviceInvalid(map[string]interface{}{"name": *name}, nil) - } + log.WithFields(logFields).Debug("mergeDevice: deleting old device") + if err := s.store.DeviceDelete(ctx, oldDevice); err != nil { + log.WithError(err).WithFields(logFields).Error("failed to delete old device") - otherDevice, err := s.store.DeviceGetByName(ctx, *name, tenant, models.DeviceStatusAccepted) - if err != 
nil && err != store.ErrNoDocuments { - return NewErrDeviceNotFound(models.UID(*name), fmt.Errorf("failed to get device by name: %w", err)) - } + return err + } - if otherDevice != nil { - return NewErrDeviceDuplicated(otherDevice.Name, err) - } + if err := s.store.NamespaceIncrementDeviceCount(ctx, tenantID, oldDevice.Status, -1); err != nil { + log.WithError(err).WithFields(logFields).Error("failed to decrement namespace device count") + + return err } - if publicURL != nil { - if device.PublicURLAddress == "" && *publicURL { - if err := s.store.DeviceCreatePublicURLAddress(ctx, models.UID(device.UID)); err != nil { - return err - } + log.WithFields(logFields).Info("device merge operation completed successfully") + + return nil +} + +// handleCloudBilling processes billing-related operations for Cloud environment. +// This function has side effects: it may delete removed devices and report to billing. +func (s *service) handleCloudBilling(ctx context.Context, namespace *models.Namespace) error { + if namespace.Billing.IsActive() { + if err := s.BillingReport(ctx, s.client, namespace.TenantID, ReportDeviceAccept); err != nil { + return NewErrBillingReportNamespaceDelete(err) + } + } else { + ok, err := s.BillingEvaluate(ctx, s.client, namespace.TenantID) + switch { + case err != nil: + return NewErrBillingEvaluate(err) + case !ok: + return ErrDeviceLimit } } - return s.store.DeviceUpdate(ctx, tenant, uid, name, publicURL) + return nil } diff --git a/api/services/device_tags.go b/api/services/device_tags.go deleted file mode 100644 index b3a7ad4dee5..00000000000 --- a/api/services/device_tags.go +++ /dev/null @@ -1,93 +0,0 @@ -package services - -import ( - "context" - - "github.com/shellhub-io/shellhub/pkg/models" -) - -// DeviceTags contains the service's function to manage device tags. 
-type DeviceTags interface { - CreateDeviceTag(ctx context.Context, uid models.UID, tag string) error - RemoveDeviceTag(ctx context.Context, uid models.UID, tag string) error - UpdateDeviceTag(ctx context.Context, uid models.UID, tags []string) error -} - -// DeviceMaxTags is the number of tags that a device can have. -const DeviceMaxTags = 3 - -// CreateDeviceTag creates a new tag to a device. UID is the device's UID and tag is the tag's name. -// -// If the device does not exist, a NewErrDeviceNotFound error will be returned. -// If the tag already exist, a NewErrTagDuplicated error will be returned. -// If the device already has the maximum number of tags, a NewErrTagLimit error will be returned. -// A unknown error will be returned if the tag is not created. -func (s *service) CreateDeviceTag(ctx context.Context, uid models.UID, tag string) error { - device, err := s.store.DeviceGet(ctx, uid) - if err != nil || device == nil { - return NewErrDeviceNotFound(uid, err) - } - - if len(device.Tags) == DeviceMaxTags { - return NewErrTagLimit(DeviceMaxTags, nil) - } - - if contains(device.Tags, tag) { - return NewErrTagDuplicated(tag, nil) - } - - return s.store.DevicePushTag(ctx, uid, tag) -} - -// RemoveDeviceTag removes a tag from a device. UID is the device's UID and tag is the tag's name. -// -// If the device does not exist, a NewErrDeviceNotFound error will be returned. -// If the tag does not exist, a NewErrTagNotFound error will be returned. -// A unknown error will be returned if the tag is not removed. -func (s *service) RemoveDeviceTag(ctx context.Context, uid models.UID, tag string) error { - device, err := s.store.DeviceGet(ctx, uid) - if err != nil || device == nil { - return NewErrDeviceNotFound(uid, err) - } - - if !contains(device.Tags, tag) { - return NewErrTagNotFound(tag, nil) - } - - return s.store.DevicePullTag(ctx, uid, tag) -} - -// UpdateDeviceTag updates a device's tags. UID is the device's UID and tags is the new tags. 
-// -// If length of tags is greater than DeviceMaxTags, a NewErrTagLimit error will be returned. -// If tags' list contains a duplicated one, it is removed and the device's tag will be updated. -// If the device does not exist, a NewErrDeviceNotFound error will be returned. -func (s *service) UpdateDeviceTag(ctx context.Context, uid models.UID, tags []string) error { - if len(tags) > DeviceMaxTags { - return NewErrTagLimit(DeviceMaxTags, nil) - } - - if _, err := s.store.DeviceGet(ctx, uid); err != nil { - return NewErrDeviceNotFound(uid, err) - } - - // TODO: remove this conversion function in favor of a external package. - set := func(list []string) []string { - s := make(map[string]bool) - l := make([]string, 0) - for _, o := range list { - if _, ok := s[o]; !ok { - s[o] = true - l = append(l, o) - } - } - - return l - }(tags) - - if _, _, err := s.store.DeviceSetTags(ctx, uid, set); err != nil { - return err - } - - return nil -} diff --git a/api/services/device_tags_test.go b/api/services/device_tags_test.go deleted file mode 100644 index eec01191c8b..00000000000 --- a/api/services/device_tags_test.go +++ /dev/null @@ -1,248 +0,0 @@ -package services - -import ( - "context" - "testing" - - "github.com/shellhub-io/shellhub/api/store" - "github.com/shellhub-io/shellhub/api/store/mocks" - storecache "github.com/shellhub-io/shellhub/pkg/cache" - "github.com/shellhub-io/shellhub/pkg/errors" - mocksGeoIp "github.com/shellhub-io/shellhub/pkg/geoip/mocks" - "github.com/shellhub-io/shellhub/pkg/models" - "github.com/stretchr/testify/assert" -) - -const ( - invalidUID = "Fails to find the device invalid uid" -) - -func TestCreateTag(t *testing.T) { - mock := new(mocks.Store) - - ctx := context.TODO() - - cases := []struct { - description string - uid models.UID - deviceName string - requiredMocks func() - expected error - }{ - { - description: "Fails to find the device invalid uid", - uid: "invalid_uid", - deviceName: "device1", - requiredMocks: func() { - 
mock.On("DeviceGet", ctx, models.UID("invalid_uid")).Return(nil, errors.New("error", "", 0)).Once() - }, - expected: NewErrDeviceNotFound(models.UID("invalid_uid"), errors.New("error", "", 0)), - }, - { - description: "Fails duplicated name", - uid: models.UID("uid"), - deviceName: "device1", - requiredMocks: func() { - device := &models.Device{ - UID: "uid", - TenantID: "tenant", - Tags: []string{"device1"}, - } - - mock.On("DeviceGet", ctx, models.UID("uid")).Return(device, nil).Once() - }, - expected: NewErrTagDuplicated("device1", nil), - }, - { - description: "Successful create a tag for the device", - uid: models.UID("uid"), - deviceName: "device6", - requiredMocks: func() { - device := &models.Device{ - UID: "uid", - TenantID: "tenant", - Tags: []string{"device1"}, - } - - mock.On("DeviceGet", ctx, models.UID(device.UID)).Return(device, nil).Once() - mock.On("DevicePushTag", ctx, models.UID(device.UID), "device6").Return(nil).Once() - }, - expected: nil, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - tc.requiredMocks() - - locator := &mocksGeoIp.Locator{} - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, locator) - - err := service.CreateDeviceTag(ctx, tc.uid, tc.deviceName) - assert.Equal(t, tc.expected, err) - }) - } - - mock.AssertExpectations(t) -} - -func TestRemoveTag(t *testing.T) { - mock := new(mocks.Store) - - ctx := context.TODO() - - cases := []struct { - description string - uid models.UID - deviceName string - requiredMocks func() - expected error - }{ - { - description: invalidUID, - uid: "invalid_uid", - deviceName: "device1", - requiredMocks: func() { - mock.On("DeviceGet", ctx, models.UID("invalid_uid")).Return(nil, errors.New("error", "", 0)).Once() - }, - expected: NewErrDeviceNotFound(models.UID("invalid_uid"), errors.New("error", "", 0)), - }, - { - description: "fail when device does not contain the tag", - uid: models.UID("uid"), - 
deviceName: "device2", - requiredMocks: func() { - device := &models.Device{ - UID: "uid", - TenantID: "tenant", - Tags: []string{"device1"}, - } - - mock.On("DeviceGet", ctx, models.UID("uid")).Return(device, nil).Once() - }, - expected: NewErrTagNotFound("device2", nil), - }, - { - description: "fail delete a tag", - uid: models.UID("uid"), - deviceName: "device1", - requiredMocks: func() { - device := &models.Device{ - UID: "uid", - TenantID: "tenant", - Tags: []string{"device1"}, - } - - mock.On("DeviceGet", ctx, models.UID("uid")).Return(device, nil).Once() - mock.On("DevicePullTag", ctx, models.UID("uid"), "device1").Return(errors.New("error", "", 0)).Once() - }, - expected: errors.New("error", "", 0), - }, - { - description: "successful delete a tag", - uid: models.UID("uid"), - deviceName: "device1", - requiredMocks: func() { - device := &models.Device{ - UID: "uid", - TenantID: "tenant", - Tags: []string{"device1"}, - } - - mock.On("DeviceGet", ctx, models.UID("uid")).Return(device, nil).Once() - mock.On("DevicePullTag", ctx, models.UID("uid"), "device1").Return(nil).Once() - }, - expected: nil, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - tc.requiredMocks() - - locator := &mocksGeoIp.Locator{} - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, locator) - - err := service.RemoveDeviceTag(ctx, tc.uid, tc.deviceName) - assert.Equal(t, tc.expected, err) - }) - } - - mock.AssertExpectations(t) -} - -func TestDeviceUpdateTag(t *testing.T) { - storemock := new(mocks.Store) - - cases := []struct { - description string - uid models.UID - tags []string - requiredMocks func() - expected error - }{ - { - description: "fails when tags exceeds the limit", - uid: models.UID("2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c"), - tags: []string{"device1", "device2", "device3", "device4"}, - requiredMocks: func() { - }, - expected: NewErrTagLimit(DeviceMaxTags, 
nil), - }, - { - description: "fails when device is not found", - uid: models.UID("2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c"), - tags: []string{"device1", "device2", "device3"}, - requiredMocks: func() { - storemock.On("DeviceGet", context.TODO(), models.UID("2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c")).Return(nil, errors.New("error", "", 0)).Once() - }, - expected: NewErrDeviceNotFound("2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", errors.New("error", "", 0)), - }, - { - description: "fails when an unexpected error occours", - uid: models.UID("2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c"), - tags: []string{"device1", "device2", "device3"}, - requiredMocks: func() { - device := &models.Device{ - UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", - TenantID: "tenant", - } - storemock.On("DeviceGet", context.TODO(), models.UID("2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c")).Return(device, nil).Once() - - tags := []string{"device1", "device2", "device3"} - storemock.On("DeviceSetTags", context.TODO(), models.UID("2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c"), tags).Return(int64(0), int64(0), errors.New("error", "layer", 1)).Once() - }, - expected: errors.New("error", "layer", 1), - }, - { - description: "successful update tags for the device", - uid: models.UID("2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c"), - tags: []string{"device1", "device2", "device3"}, - requiredMocks: func() { - device := &models.Device{ - UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", - TenantID: "tenant", - } - storemock.On("DeviceGet", context.TODO(), models.UID("2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c")).Return(device, nil).Once() - - tags := []string{"device1", "device2", "device3"} - storemock.On("DeviceSetTags", context.TODO(), 
models.UID("2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c"), tags).Return(int64(1), int64(3), nil).Once() - }, - expected: nil, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - tc.requiredMocks() - - locator := &mocksGeoIp.Locator{} - service := NewService(store.Store(storemock), privateKey, publicKey, storecache.NewNullCache(), clientMock, locator) - - err := service.UpdateDeviceTag(context.TODO(), tc.uid, tc.tags) - assert.Equal(t, tc.expected, err) - }) - } - - storemock.AssertExpectations(t) -} diff --git a/api/services/device_test.go b/api/services/device_test.go index 43ed8790efc..3f003be300e 100644 --- a/api/services/device_test.go +++ b/api/services/device_test.go @@ -2,30 +2,30 @@ package services import ( "context" - "fmt" - "net" "testing" "time" - "github.com/shellhub-io/shellhub/api/pkg/guard" "github.com/shellhub-io/shellhub/api/store" - "github.com/shellhub-io/shellhub/api/store/mocks" - "github.com/shellhub-io/shellhub/pkg/api/paginator" + storemock "github.com/shellhub-io/shellhub/api/store/mocks" + req "github.com/shellhub-io/shellhub/pkg/api/internalclient" + "github.com/shellhub-io/shellhub/pkg/api/query" + "github.com/shellhub-io/shellhub/pkg/api/requests" storecache "github.com/shellhub-io/shellhub/pkg/cache" + "github.com/shellhub-io/shellhub/pkg/clock" + clockmock "github.com/shellhub-io/shellhub/pkg/clock/mocks" + "github.com/shellhub-io/shellhub/pkg/envs" + envsmocks "github.com/shellhub-io/shellhub/pkg/envs/mocks" "github.com/shellhub-io/shellhub/pkg/errors" - "github.com/shellhub-io/shellhub/pkg/geoip" - mocksGeoIp "github.com/shellhub-io/shellhub/pkg/geoip/mocks" "github.com/shellhub-io/shellhub/pkg/models" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" ) func TestListDevices(t *testing.T) { - mock := new(mocks.Store) - - ctx := context.TODO() - - status := []models.DeviceStatus{models.DeviceStatusPending, 
models.DeviceStatusAccepted, models.DeviceStatusRejected, models.DeviceStatusRemoved} - order := []string{"asc", "desc"} + storeMock := new(storemock.Store) + queryOptionsMock := new(storemock.QueryOptions) + storeMock.On("Options").Return(queryOptionsMock) type Expected struct { devices []models.Device @@ -35,276 +35,698 @@ func TestListDevices(t *testing.T) { cases := []struct { description string - tenant string - pagination paginator.Query - filter []models.Filter - status models.DeviceStatus - sort, order string - requiredMocks func() + req *requests.DeviceList + requiredMocks func(context.Context) expected Expected }{ { - description: "fails when the store device list fails when status is pending", - tenant: "tenant", - pagination: paginator.Query{Page: 1, PerPage: 10}, - filter: []models.Filter{ - { - Type: "property", - Params: &models.PropertyParams{Name: "hostname", Operator: "eq"}, - }, + description: "fails to list devices", + req: &requests.DeviceList{ + TenantID: "", + DeviceStatus: models.DeviceStatusAccepted, + Paginator: query.Paginator{Page: 1, PerPage: 10}, + Sorter: query.Sorter{By: "created_at", Order: "asc"}, + Filters: query.Filters{}, }, - status: status[0], - sort: "name", - order: order[0], - requiredMocks: func() { - namespace := &models.Namespace{ - TenantID: "tenant", - MaxDevices: 3, - DevicesCount: 3, - } - - mock.On("NamespaceGet", ctx, namespace.TenantID). - Return(namespace, nil).Once() - mock.On("DeviceRemovedCount", ctx, namespace.TenantID). - Return(int64(1), nil).Once() - mock.On("DeviceList", ctx, paginator.Query{Page: 1, PerPage: 10}, []models.Filter{ - { - Type: "property", - Params: &models.PropertyParams{Name: "hostname", Operator: "eq"}, - }, - }, status[0], "name", order[0], store.DeviceListModeMaxDeviceReached). - Return(nil, 0, errors.New("error", "", 0)).Once() + requiredMocks: func(ctx context.Context) { + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). 
+ Once() + queryOptionsMock. + On("Match", &query.Filters{}). + Return(nil). + Once() + queryOptionsMock. + On("Sort", &query.Sorter{By: "created_at", Order: query.OrderAsc}). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 1, PerPage: 10}). + Return(nil). + Once() + storeMock. + On("DeviceList", ctx, store.DeviceAcceptableIfNotAccepted, mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return([]models.Device{}, 0, errors.New("error", "", 0)). + Once() }, expected: Expected{ - nil, - 0, - errors.New("error", "", 0), + devices: []models.Device{}, + count: 0, + err: errors.New("error", "", 0), }, }, { - description: "fails when the store device list fails when status is not pending", - tenant: "tenant", - pagination: paginator.Query{Page: 1, PerPage: 10}, - filter: []models.Filter{ - { - Type: "property", - Params: &models.PropertyParams{Name: "hostname", Operator: "eq"}, - }, + description: "succeeds to list devices", + req: &requests.DeviceList{ + TenantID: "", + DeviceStatus: models.DeviceStatusAccepted, + Paginator: query.Paginator{Page: 1, PerPage: 10}, + Sorter: query.Sorter{By: "created_at", Order: "asc"}, + Filters: query.Filters{}, }, - status: status[1], - sort: "name", - order: order[1], - requiredMocks: func() { - filters := []models.Filter{ - { - Type: "property", - Params: &models.PropertyParams{Name: "hostname", Operator: "eq"}, - }, - } - - mock.On("DeviceList", ctx, paginator.Query{Page: 1, PerPage: 10}, filters, status[1], "name", order[1], store.DeviceListModeDefault). - Return(nil, 0, errors.New("error", "", 0)).Once() + requiredMocks: func(ctx context.Context) { + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("Match", &query.Filters{}). + Return(nil). + Once() + queryOptionsMock. 
+ On("Sort", &query.Sorter{By: "created_at", Order: query.OrderAsc}). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 1, PerPage: 10}). + Return(nil). + Once() + storeMock. + On("DeviceList", ctx, store.DeviceAcceptableIfNotAccepted, mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return([]models.Device{}, 0, nil). + Once() }, expected: Expected{ - nil, - 0, - errors.New("error", "", 0), + devices: []models.Device{}, + count: 0, + err: nil, }, }, - { - description: "succeeds when status is pending", - tenant: "tenant", - pagination: paginator.Query{Page: 1, PerPage: 10}, - filter: []models.Filter{ - { - Type: "property", - Params: &models.PropertyParams{Name: "hostname", Operator: "eq"}, - }, - }, - status: status[0], - sort: "name", - order: order[0], - requiredMocks: func() { - namespace := &models.Namespace{ - TenantID: "tenant", - MaxDevices: 3, - DevicesCount: 3, - } + } - devices := []models.Device{ - {UID: "uid"}, - {UID: "uid2"}, - {UID: "uid3"}, - } + service := NewService(storeMock, privateKey, publicKey, storecache.NewNullCache(), clientMock) - filters := []models.Filter{ - { - Type: "property", - Params: &models.PropertyParams{Name: "hostname", Operator: "eq"}, - }, - } + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + ctx := context.TODO() + tc.requiredMocks(ctx) + + devices, count, err := service.ListDevices(ctx, tc.req) + require.Equal(tt, tc.expected, Expected{devices, count, err}) + }) + } + + storeMock.AssertExpectations(t) +} - mock.On("NamespaceGet", ctx, namespace.TenantID). - Return(namespace, nil).Once() - mock.On("DeviceRemovedCount", ctx, namespace.TenantID). - Return(int64(1), nil).Once() - mock.On("DeviceList", ctx, paginator.Query{Page: 1, PerPage: 10}, filters, status[0], "name", order[0], store.DeviceListModeMaxDeviceReached). 
- Return(devices, len(devices), nil).Once() +func TestListDevices_status_removed(t *testing.T) { + storeMock := new(storemock.Store) + queryOptionsMock := new(storemock.QueryOptions) + storeMock.On("Options").Return(queryOptionsMock) + + type Expected struct { + devices []models.Device + count int + err error + } + + cases := []struct { + description string + req *requests.DeviceList + requiredMocks func(context.Context) + expected Expected + }{ + { + description: "fails when could not list the removed devices", + req: &requests.DeviceList{ + TenantID: "00000000-0000-4000-0000-000000000000", + DeviceStatus: models.DeviceStatusRemoved, + Paginator: query.Paginator{Page: 1, PerPage: 10}, + Sorter: query.Sorter{By: "created_at", Order: "asc"}, + Filters: query.Filters{}, + }, + requiredMocks: func(ctx context.Context) { + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusRemoved). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-4000-0000-000000000000"). + Return(nil). + Once() + queryOptionsMock. + On("Match", &query.Filters{}). + Return(nil). + Once() + queryOptionsMock. + On("Sort", &query.Sorter{By: "created_at", Order: query.OrderAsc}). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 1, PerPage: 10}). + Return(nil). + Once() + storeMock. + On("DeviceList", ctx, store.DeviceAcceptableFromRemoved, mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return([]models.Device{}, 0, errors.New("error", "", 0)). 
+ Once() }, expected: Expected{ - []models.Device{ - {UID: "uid"}, - {UID: "uid2"}, - {UID: "uid3"}, - }, - len([]models.Device{ - {UID: "uid"}, - {UID: "uid2"}, - {UID: "uid3"}, - }), - nil, + devices: []models.Device{}, + count: 0, + err: errors.New("error", "", 0), }, }, { - description: "succeeds when status is not pending", - tenant: "tenant", - pagination: paginator.Query{Page: 1, PerPage: 10}, - filter: []models.Filter{ - { - Type: "property", - Params: &models.PropertyParams{Name: "hostname", Operator: "eq"}, - }, + description: "succeeds to list the removed devices", + req: &requests.DeviceList{ + TenantID: "00000000-0000-4000-0000-000000000000", + DeviceStatus: models.DeviceStatusRemoved, + Paginator: query.Paginator{Page: 1, PerPage: 10}, + Sorter: query.Sorter{By: "created_at", Order: "asc"}, + Filters: query.Filters{}, }, - status: status[1], - sort: "name", - order: order[1], - requiredMocks: func() { - filters := []models.Filter{ + requiredMocks: func(ctx context.Context) { + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusRemoved). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-4000-0000-000000000000"). + Return(nil). + Once() + queryOptionsMock. + On("Match", &query.Filters{}). + Return(nil). + Once() + queryOptionsMock. + On("Sort", &query.Sorter{By: "created_at", Order: query.OrderAsc}). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 1, PerPage: 10}). + Return(nil). + Once() + storeMock. + On("DeviceList", ctx, store.DeviceAcceptableFromRemoved, mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return([]models.Device{{Name: "dev"}}, 1, nil). 
+ Once() + }, + expected: Expected{ + devices: []models.Device{ { - Type: "property", - Params: &models.PropertyParams{Name: "hostname", Operator: "eq"}, + Name: "dev", }, - } + }, + count: 1, + err: nil, + }, + }, + } - devices := []models.Device{ - {UID: "uid"}, - {UID: "uid2"}, - {UID: "uid3"}, - } + service := NewService(storeMock, privateKey, publicKey, storecache.NewNullCache(), clientMock) + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + ctx := context.TODO() + tc.requiredMocks(ctx) + + devices, count, err := service.ListDevices(ctx, tc.req) + require.Equal(tt, tc.expected, Expected{devices, count, err}) + }) + } + + storeMock.AssertExpectations(t) +} + +func TestListDevices_tenant_not_empty(t *testing.T) { + storeMock := new(storemock.Store) + queryOptionsMock := new(storemock.QueryOptions) + storeMock.On("Options").Return(queryOptionsMock) + + type Expected struct { + devices []models.Device + count int + err error + } - mock.On("DeviceList", ctx, paginator.Query{Page: 1, PerPage: 10}, filters, status[1], "name", order[1], store.DeviceListModeDefault). - Return(devices, len(devices), nil).Once() + cases := []struct { + description string + req *requests.DeviceList + requiredMocks func(context.Context) + expected Expected + }{ + { + description: "fails when the namespace does not exists", + req: &requests.DeviceList{ + TenantID: "00000000-0000-4000-0000-000000000000", + DeviceStatus: models.DeviceStatusAccepted, + Paginator: query.Paginator{Page: 1, PerPage: 10}, + Sorter: query.Sorter{By: "created_at", Order: "asc"}, + Filters: query.Filters{}, + }, + requiredMocks: func(ctx context.Context) { + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-4000-0000-000000000000"). + Return(nil). + Once() + queryOptionsMock. + On("Match", &query.Filters{}). + Return(nil). + Once() + queryOptionsMock. 
+ On("Sort", &query.Sorter{By: "created_at", Order: query.OrderAsc}). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 1, PerPage: 10}). + Return(nil). + Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(nil, errors.New("error", "", 0)). + Once() }, expected: Expected{ - []models.Device{ - {UID: "uid"}, - {UID: "uid2"}, - {UID: "uid3"}, - }, - len([]models.Device{ - {UID: "uid"}, - {UID: "uid2"}, - {UID: "uid3"}, - }), - nil, + devices: nil, + count: 0, + err: NewErrNamespaceNotFound("00000000-0000-4000-0000-000000000000", errors.New("error", "", 0)), }, }, { - description: "fails when status is removed", - tenant: "tenant", - pagination: paginator.Query{Page: 1, PerPage: 10}, - filter: []models.Filter{ - { - Type: "property", - Params: &models.PropertyParams{Name: "hostname", Operator: "eq"}, - }, + description: "[cloud] fails when the namespace reached the device limit and cannot list the devices", + req: &requests.DeviceList{ + TenantID: "00000000-0000-4000-0000-000000000000", + DeviceStatus: models.DeviceStatusAccepted, + Paginator: query.Paginator{Page: 1, PerPage: 10}, + Sorter: query.Sorter{By: "created_at", Order: "asc"}, + Filters: query.Filters{}, }, - status: status[3], - sort: "name", - order: order[1], - requiredMocks: func() { - filters := []models.Filter{ - { - Type: "property", - Params: &models.PropertyParams{Name: "hostname", Operator: "eq"}, - }, - } - - mock.On("DeviceRemovedList", ctx, "tenant", paginator.Query{Page: 1, PerPage: 10}, filters, "name", order[1]). - Return(nil, 0, errors.New("error", "", 0)).Once() + requiredMocks: func(ctx context.Context) { + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-4000-0000-000000000000"). + Return(nil). + Once() + queryOptionsMock. + On("Match", &query.Filters{}). + Return(nil). 
+ Once() + queryOptionsMock. + On("Sort", &query.Sorter{By: "created_at", Order: query.OrderAsc}). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 1, PerPage: 10}). + Return(nil). + Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", MaxDevices: 3, DevicesAcceptedCount: 3, DevicesRemovedCount: 1}, nil). + Once() + storeMock. + On("DeviceList", ctx, store.DeviceAcceptableAsFalse, mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return([]models.Device{}, 0, errors.New("error", "layer", 0)). + Once() }, expected: Expected{ - nil, - 0, - errors.New("error", "", 0), + devices: []models.Device{}, + count: 0, + err: errors.New("error", "layer", 0), }, }, { - description: "succeeds when status is removed", - tenant: "tenant", - pagination: paginator.Query{Page: 1, PerPage: 10}, - filter: []models.Filter{ - { - Type: "property", - Params: &models.PropertyParams{Name: "hostname", Operator: "eq"}, - }, + description: "[cloud] succeeds when the namespace reached the device limit and list the devices", + req: &requests.DeviceList{ + TenantID: "00000000-0000-4000-0000-000000000000", + DeviceStatus: models.DeviceStatusAccepted, + Paginator: query.Paginator{Page: 1, PerPage: 10}, + Sorter: query.Sorter{By: "created_at", Order: "asc"}, + Filters: query.Filters{}, }, - status: status[3], - sort: "name", - order: order[1], - requiredMocks: func() { - devices := []models.Device{ - {UID: "uid"}, - {UID: "uid2"}, - {UID: "uid3"}, - } - - removedDevices := []models.DeviceRemoved{ - {Device: &devices[0]}, - {Device: &devices[1]}, - {Device: &devices[2]}, - } - - filters := []models.Filter{ - { - Type: "property", - Params: 
&models.PropertyParams{Name: "hostname", Operator: "eq"}, - }, - } - mock.On("DeviceRemovedList", ctx, "tenant", paginator.Query{Page: 1, PerPage: 10}, filters, "name", order[1]). - Return(removedDevices, len(removedDevices), nil).Once() + requiredMocks: func(ctx context.Context) { + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-4000-0000-000000000000"). + Return(nil). + Once() + queryOptionsMock. + On("Match", &query.Filters{}). + Return(nil). + Once() + queryOptionsMock. + On("Sort", &query.Sorter{By: "created_at", Order: query.OrderAsc}). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 1, PerPage: 10}). + Return(nil). + Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", MaxDevices: 3, DevicesAcceptedCount: 3, DevicesRemovedCount: 1}, nil). + Once() + storeMock. + On("DeviceList", ctx, store.DeviceAcceptableAsFalse, mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return([]models.Device{}, 0, nil). 
+ Once() }, expected: Expected{ - []models.Device{ - {UID: "uid"}, - {UID: "uid2"}, - {UID: "uid3"}, - }, - len([]models.Device{ - {UID: "uid"}, - {UID: "uid2"}, - {UID: "uid3"}, - }), - nil, + devices: []models.Device{}, + count: 0, + err: nil, + }, + }, + { + description: "[cloud] fails when the namespace do not reached the device limit and cannot list the devices", + req: &requests.DeviceList{ + TenantID: "00000000-0000-4000-0000-000000000000", + DeviceStatus: models.DeviceStatusAccepted, + Paginator: query.Paginator{Page: 1, PerPage: 10}, + Sorter: query.Sorter{By: "created_at", Order: "asc"}, + Filters: query.Filters{}, + }, + requiredMocks: func(ctx context.Context) { + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-4000-0000-000000000000"). + Return(nil). + Once() + queryOptionsMock. + On("Match", &query.Filters{}). + Return(nil). + Once() + queryOptionsMock. + On("Sort", &query.Sorter{By: "created_at", Order: query.OrderAsc}). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 1, PerPage: 10}). + Return(nil). + Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", MaxDevices: 3, DevicesAcceptedCount: 2, DevicesRemovedCount: 0}, nil). + Once() + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("true"). + Once() + storeMock. + On("DeviceList", ctx, store.DeviceAcceptableIfNotAccepted, mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return([]models.Device{}, 0, errors.New("error", "layer", 0)). 
+ Once() + }, + expected: Expected{ + devices: []models.Device{}, + count: 0, + err: errors.New("error", "layer", 0), + }, + }, + { + description: "[cloud] succeeds when the namespace do not reached the device limit and list the devices", + req: &requests.DeviceList{ + TenantID: "00000000-0000-4000-0000-000000000000", + DeviceStatus: models.DeviceStatusAccepted, + Paginator: query.Paginator{Page: 1, PerPage: 10}, + Sorter: query.Sorter{By: "created_at", Order: "asc"}, + Filters: query.Filters{}, + }, + requiredMocks: func(ctx context.Context) { + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-4000-0000-000000000000"). + Return(nil). + Once() + queryOptionsMock. + On("Match", &query.Filters{}). + Return(nil). + Once() + queryOptionsMock. + On("Sort", &query.Sorter{By: "created_at", Order: query.OrderAsc}). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 1, PerPage: 10}). + Return(nil). + Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", MaxDevices: 3, DevicesAcceptedCount: 2, DevicesRemovedCount: 0}, nil). + Once() + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("true"). + Once() + storeMock. + On("DeviceList", ctx, store.DeviceAcceptableIfNotAccepted, mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return([]models.Device{}, 0, nil). 
+ Once() + }, + expected: Expected{ + devices: []models.Device{}, + count: 0, + err: nil, + }, + }, + { + description: "[enterprise|community] fails when the namespace reached the device limit and cannot list the devices", + req: &requests.DeviceList{ + TenantID: "00000000-0000-4000-0000-000000000000", + DeviceStatus: models.DeviceStatusAccepted, + Paginator: query.Paginator{Page: 1, PerPage: 10}, + Sorter: query.Sorter{By: "created_at", Order: "asc"}, + Filters: query.Filters{}, + }, + requiredMocks: func(ctx context.Context) { + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-4000-0000-000000000000"). + Return(nil). + Once() + queryOptionsMock. + On("Match", &query.Filters{}). + Return(nil). + Once() + queryOptionsMock. + On("Sort", &query.Sorter{By: "created_at", Order: query.OrderAsc}). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 1, PerPage: 10}). + Return(nil). + Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", MaxDevices: 3, DevicesAcceptedCount: 3}, nil). + Once() + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("false"). + Once() + envMock. + On("Get", "SHELLHUB_ENTERPRISE"). + Return("true"). + Once() + storeMock. + On("DeviceList", ctx, store.DeviceAcceptableAsFalse, mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return([]models.Device{}, 0, errors.New("error", "layer", 0)). 
+ Once() + }, + expected: Expected{ + devices: []models.Device{}, + count: 0, + err: errors.New("error", "layer", 0), + }, + }, + { + description: "[enterprise|community] succeeds when the namespace reached the device limit and list the devices", + req: &requests.DeviceList{ + TenantID: "00000000-0000-4000-0000-000000000000", + DeviceStatus: models.DeviceStatusAccepted, + Paginator: query.Paginator{Page: 1, PerPage: 10}, + Sorter: query.Sorter{By: "created_at", Order: "asc"}, + Filters: query.Filters{}, + }, + requiredMocks: func(ctx context.Context) { + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-4000-0000-000000000000"). + Return(nil). + Once() + queryOptionsMock. + On("Match", &query.Filters{}). + Return(nil). + Once() + queryOptionsMock. + On("Sort", &query.Sorter{By: "created_at", Order: query.OrderAsc}). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 1, PerPage: 10}). + Return(nil). + Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", MaxDevices: 3, DevicesAcceptedCount: 3}, nil). + Once() + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("false"). + Once() + envMock. + On("Get", "SHELLHUB_ENTERPRISE"). + Return("true"). + Once() + storeMock. + On("DeviceList", ctx, store.DeviceAcceptableAsFalse, mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return([]models.Device{}, 0, nil). 
+ Once() + }, + expected: Expected{ + devices: []models.Device{}, + count: 0, + err: nil, + }, + }, + { + description: "[enterprise|community] fails when the namespace do not reached the device limit and cannot list the devices", + req: &requests.DeviceList{ + TenantID: "00000000-0000-4000-0000-000000000000", + DeviceStatus: models.DeviceStatusAccepted, + Paginator: query.Paginator{Page: 1, PerPage: 10}, + Sorter: query.Sorter{By: "created_at", Order: "asc"}, + Filters: query.Filters{}, + }, + requiredMocks: func(ctx context.Context) { + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-4000-0000-000000000000"). + Return(nil). + Once() + queryOptionsMock. + On("Match", &query.Filters{}). + Return(nil). + Once() + queryOptionsMock. + On("Sort", &query.Sorter{By: "created_at", Order: query.OrderAsc}). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 1, PerPage: 10}). + Return(nil). + Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", MaxDevices: 3, DevicesAcceptedCount: 2}, nil). + Once() + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("false"). + Once() + envMock. + On("Get", "SHELLHUB_ENTERPRISE"). + Return("true"). + Once() + storeMock. + On("DeviceList", ctx, store.DeviceAcceptableIfNotAccepted, mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return([]models.Device{}, 0, errors.New("error", "layer", 0)). 
+ Once() + }, + expected: Expected{ + devices: []models.Device{}, + count: 0, + err: errors.New("error", "layer", 0), + }, + }, + { + description: "[enterprise|community] succeeds when the namespace do not reached the device limit and list the devices", + req: &requests.DeviceList{ + TenantID: "00000000-0000-4000-0000-000000000000", + DeviceStatus: models.DeviceStatusAccepted, + Paginator: query.Paginator{Page: 1, PerPage: 10}, + Sorter: query.Sorter{By: "created_at", Order: "asc"}, + Filters: query.Filters{}, + }, + requiredMocks: func(ctx context.Context) { + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-4000-0000-000000000000"). + Return(nil). + Once() + queryOptionsMock. + On("Match", &query.Filters{}). + Return(nil). + Once() + queryOptionsMock. + On("Sort", &query.Sorter{By: "created_at", Order: query.OrderAsc}). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 1, PerPage: 10}). + Return(nil). + Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", MaxDevices: 3, DevicesAcceptedCount: 2}, nil). + Once() + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("false"). + Once() + envMock. + On("Get", "SHELLHUB_ENTERPRISE"). + Return("true"). + Once() + storeMock. + On("DeviceList", ctx, store.DeviceAcceptableIfNotAccepted, mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return([]models.Device{}, 0, nil). 
+ Once() + }, + expected: Expected{ + devices: []models.Device{}, + count: 0, + err: nil, }, }, } + service := NewService(storeMock, privateKey, publicKey, storecache.NewNullCache(), clientMock) + for _, tc := range cases { - t.Run(tc.description, func(*testing.T) { - tc.requiredMocks() + t.Run(tc.description, func(tt *testing.T) { + ctx := context.TODO() + tc.requiredMocks(ctx) - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) - returnedDevices, count, err := service.ListDevices(ctx, tc.tenant, tc.pagination, tc.filter, tc.status, tc.sort, tc.order) - assert.Equal(t, tc.expected, Expected{returnedDevices, count, err}) + devices, count, err := service.ListDevices(ctx, tc.req) + require.Equal(tt, tc.expected, Expected{devices, count, err}) }) } - mock.AssertExpectations(t) + storeMock.AssertExpectations(t) } func TestGetDevice(t *testing.T) { - mock := new(mocks.Store) + mock := new(storemock.Store) ctx := context.TODO() @@ -322,8 +744,7 @@ func TestGetDevice(t *testing.T) { { description: "fails when the store get device fails", requiredMocks: func() { - mock.On("DeviceGet", ctx, models.UID("_uid")). - Return(nil, errors.New("error", "", 0)).Once() + mock.On("DeviceResolve", ctx, store.DeviceUIDResolver, "_uid").Return(nil, errors.New("error", "", 0)).Once() }, uid: models.UID("_uid"), expected: Expected{ @@ -335,9 +756,7 @@ func TestGetDevice(t *testing.T) { description: "succeeds", requiredMocks: func() { device := &models.Device{UID: "uid"} - - mock.On("DeviceGet", ctx, models.UID("uid")). 
- Return(device, nil).Once() + mock.On("DeviceResolve", ctx, store.DeviceUIDResolver, "uid").Return(device, nil).Once() }, uid: models.UID("uid"), expected: Expected{ @@ -351,7 +770,7 @@ func TestGetDevice(t *testing.T) { t.Run(tc.description, func(t *testing.T) { tc.requiredMocks() - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) + service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock) returnedDevice, err := service.GetDevice(ctx, tc.uid) assert.Equal(t, tc.expected, Expected{returnedDevice, err}) @@ -361,8 +780,172 @@ func TestGetDevice(t *testing.T) { mock.AssertExpectations(t) } +func TestResolveDevice(t *testing.T) { + storeMock := new(storemock.Store) + queryOptionsMock := new(storemock.QueryOptions) + storeMock.On("Options").Return(queryOptionsMock) + + ctx := context.TODO() + + type Expected struct { + device *models.Device + err error + } + + cases := []struct { + description string + requiredMocks func() + req *requests.ResolveDevice + expected Expected + }{ + { + description: "fails when namespace does not exists", + req: &requests.ResolveDevice{TenantID: "00000000-0000-0000-0000-000000000000", UID: "uid", Hostname: ""}, + requiredMocks: func() { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-0000-0000-000000000000"). + Return(nil, errors.New("error", "", 0)). + Once() + }, + expected: Expected{ + nil, + NewErrNamespaceNotFound("00000000-0000-0000-0000-000000000000", errors.New("error", "", 0)), + }, + }, + { + description: "fails when cannot retrieve a device with the specified UID", + req: &requests.ResolveDevice{TenantID: "00000000-0000-0000-0000-000000000000", UID: "uid", Hostname: ""}, + requiredMocks: func() { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-0000-0000-000000000000"). 
+ Return(&models.Namespace{Name: "namespace", TenantID: "00000000-0000-0000-0000-000000000000"}, nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "uid", mock.AnythingOfType("store.QueryOption")). + Return(nil, errors.New("error", "", 0)). + Once() + }, + expected: Expected{ + nil, + NewErrDeviceNotFound(models.UID(""), errors.New("error", "", 0)), + }, + }, + { + description: "succeeds to fetch a device using UID", + req: &requests.ResolveDevice{TenantID: "00000000-0000-0000-0000-000000000000", UID: "uid", Hostname: ""}, + requiredMocks: func() { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-0000-0000-000000000000"). + Return(&models.Namespace{Name: "namespace", TenantID: "00000000-0000-0000-0000-000000000000"}, nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "uid", mock.AnythingOfType("store.QueryOption")). + Return(&models.Device{UID: "uid"}, nil). + Once() + }, + expected: Expected{ + &models.Device{UID: "uid"}, + nil, + }, + }, + { + description: "fails when cannot retrieve a device with the specified hostname", + req: &requests.ResolveDevice{TenantID: "00000000-0000-0000-0000-000000000000", UID: "", Hostname: "hostname"}, + requiredMocks: func() { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-0000-0000-000000000000"). + Return(&models.Namespace{Name: "namespace", TenantID: "00000000-0000-0000-0000-000000000000"}, nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceHostnameResolver, "hostname", mock.AnythingOfType("store.QueryOption")). + Return(nil, errors.New("error", "", 0)). 
+ Once() + }, + expected: Expected{ + nil, + NewErrDeviceNotFound(models.UID(""), errors.New("error", "", 0)), + }, + }, + { + description: "succeeds to fetch a device using hostname", + req: &requests.ResolveDevice{TenantID: "00000000-0000-0000-0000-000000000000", UID: "", Hostname: "hostname"}, + requiredMocks: func() { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-0000-0000-000000000000"). + Return(&models.Namespace{Name: "namespace", TenantID: "00000000-0000-0000-0000-000000000000"}, nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceHostnameResolver, "hostname", mock.AnythingOfType("store.QueryOption")). + Return(&models.Device{UID: "uid"}, nil). + Once() + }, + expected: Expected{ + &models.Device{UID: "uid"}, + nil, + }, + }, + { + description: "succeeds to fetch a device using uid when both are provided", + req: &requests.ResolveDevice{TenantID: "00000000-0000-0000-0000-000000000000", UID: "uid", Hostname: "hostname"}, + requiredMocks: func() { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-0000-0000-000000000000"). + Return(&models.Namespace{Name: "namespace", TenantID: "00000000-0000-0000-0000-000000000000"}, nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "uid", mock.AnythingOfType("store.QueryOption")). + Return(&models.Device{UID: "uid"}, nil). 
+ Once() + }, + expected: Expected{ + &models.Device{UID: "uid"}, + nil, + }, + }, + } + + s := NewService(store.Store(storeMock), privateKey, publicKey, storecache.NewNullCache(), clientMock) + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + tc.requiredMocks() + + device, err := s.ResolveDevice(ctx, tc.req) + assert.Equal(t, tc.expected, Expected{device, err}) + }) + } + + storeMock.AssertExpectations(t) +} + func TestDeleteDevice(t *testing.T) { - mock := new(mocks.Store) + now := time.Now() + clockMock := new(clockmock.Clock) + clockMock.On("Now").Return(now) + clock.DefaultBackend = clockMock + + storeMock := new(storemock.Store) + queryOptionsMock := new(storemock.QueryOptions) + storeMock.On("Options").Return(queryOptionsMock) ctx := context.TODO() @@ -378,325 +961,384 @@ func TestDeleteDevice(t *testing.T) { uid: models.UID("_uid"), tenant: "tenant", requiredMocks: func() { - mock.On("DeviceGetByUID", ctx, models.UID("_uid"), "tenant"). - Return(nil, errors.New("error", "", 0)).Once() + queryOptionsMock. + On("InNamespace", "tenant"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "_uid", mock.AnythingOfType("store.QueryOption")). + Return(nil, errors.New("error", "", 0)). + Once() }, expected: NewErrDeviceNotFound(models.UID("_uid"), errors.New("error", "", 0)), }, { - description: "fails when the store namespace get fails", + description: "fails when the store device delete fails (hard-delete pending)", uid: models.UID("uid"), tenant: "tenant", requiredMocks: func() { device := &models.Device{ UID: "uid", + Status: models.DeviceStatusPending, TenantID: "tenant", CreatedAt: time.Time{}, } - - mock.On("DeviceGetByUID", ctx, models.UID(device.UID), "tenant"). - Return(device, nil).Once() - mock.On("NamespaceGet", ctx, "tenant"). - Return(nil, errors.New("error", "", 0)).Once() + queryOptionsMock. + On("InNamespace", "tenant"). + Return(nil). + Once() + storeMock. 
+ On("DeviceResolve", ctx, store.DeviceUIDResolver, "uid", mock.AnythingOfType("store.QueryOption")). + Return(device, nil). + Once() + storeMock. + On("DeviceDelete", ctx, device). + Return(errors.New("error", "", 0)). + Once() }, - expected: NewErrNamespaceNotFound("tenant", errors.New("error", "", 0)), + expected: errors.New("error", "", 0), }, { - description: "fails when device removed insert return error", + description: "succeeds", uid: models.UID("uid"), tenant: "tenant", requiredMocks: func() { - namespace := &models.Namespace{ - Name: "group1", - Owner: "id", - TenantID: "tenant", - Members: []models.Member{ - { - ID: "id", - Role: guard.RoleOwner, - }, - { - ID: "id2", - Role: guard.RoleObserver, - }, - }, - MaxDevices: 3, - } - device := &models.Device{ UID: "uid", + Status: models.DeviceStatusAccepted, TenantID: "tenant", CreatedAt: time.Time{}, } - - mock.On("DeviceGetByUID", ctx, models.UID(device.UID), "tenant"). - Return(device, nil).Once() - mock.On("NamespaceGet", ctx, "tenant"). - Return(namespace, nil).Once() - envMock.On("Get", "SHELLHUB_CLOUD").Return("true").Once() - envMock.On("Get", "SHELLHUB_BILLING").Return("true").Once() - mock.On("DeviceRemovedInsert", ctx, "tenant", device). - Return(errors.New("error", "", 0)).Once() + queryOptionsMock. + On("InNamespace", "tenant"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "uid", mock.AnythingOfType("store.QueryOption")). + Return(device, nil). + Once() + + expectedDevice := *device + expectedDevice.Status = models.DeviceStatusRemoved + expectedDevice.RemovedAt = &now + + storeMock. + On("DeviceUpdate", ctx, &expectedDevice). + Return(nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "tenant", models.DeviceStatusRemoved, int64(1)). + Return(nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "tenant", models.DeviceStatusAccepted, int64(-1)). + Return(nil). 
+ Once() }, - expected: NewErrDeviceRemovedInsert(errors.New("error", "", 0)), + expected: nil, }, { - description: "fails when the store device delete fails", + description: "[with_billing] fails when cannot update the device", uid: models.UID("uid"), tenant: "tenant", requiredMocks: func() { - namespace := &models.Namespace{ - Name: "group1", - Owner: "id", - TenantID: "tenant", - Members: []models.Member{ - { - ID: "id", - Role: guard.RoleOwner, - }, - { - ID: "id2", - Role: guard.RoleObserver, - }, - }, - MaxDevices: 3, - } + device := &models.Device{UID: "uid", TenantID: "tenant", CreatedAt: time.Time{}, Status: models.DeviceStatusAccepted} - device := &models.Device{ - UID: "uid", - TenantID: "tenant", - CreatedAt: time.Time{}, - } + queryOptionsMock. + On("InNamespace", "tenant"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "uid", mock.AnythingOfType("store.QueryOption")). + Return(device, nil). + Once() - mock.On("DeviceGetByUID", ctx, models.UID(device.UID), "tenant"). - Return(device, nil).Once() - mock.On("NamespaceGet", ctx, "tenant"). - Return(namespace, nil).Once() - envMock.On("Get", "SHELLHUB_CLOUD").Return("false").Once() - mock.On("DeviceDelete", ctx, models.UID(device.UID)). - Return(errors.New("error", "", 0)).Once() + expectedDevice := *device + expectedDevice.Status = models.DeviceStatusRemoved + expectedDevice.RemovedAt = &now + + storeMock. + On("DeviceUpdate", ctx, &expectedDevice). + Return(errors.New("error", "", 0)). + Once() }, expected: errors.New("error", "", 0), }, { - description: "succeeds", + description: "[with_billing] succeeds", uid: models.UID("uid"), tenant: "tenant", requiredMocks: func() { - device := &models.Device{ - UID: "uid", - TenantID: "tenant", - CreatedAt: time.Time{}, - } - - mock.On("DeviceGetByUID", ctx, models.UID(device.UID), "tenant"). - Return(device, nil).Once() - mock.On("NamespaceGet", ctx, "tenant"). 
- Return(&models.Namespace{TenantID: "tenant"}, nil).Once() - envMock.On("Get", "SHELLHUB_CLOUD").Return("false").Once() - mock.On("DeviceDelete", ctx, models.UID(device.UID)). - Return(nil).Once() + device := &models.Device{UID: "uid", Status: models.DeviceStatusAccepted, TenantID: "tenant", CreatedAt: time.Time{}} + + queryOptionsMock. + On("InNamespace", "tenant"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "uid", mock.AnythingOfType("store.QueryOption")). + Return(device, nil). + Once() + + expectedDevice := *device + expectedDevice.Status = models.DeviceStatusRemoved + expectedDevice.RemovedAt = &now + + storeMock. + On("DeviceUpdate", ctx, &expectedDevice). + Return(nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "tenant", models.DeviceStatusRemoved, int64(1)). + Return(nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "tenant", models.DeviceStatusAccepted, int64(-1)). + Return(nil). + Once() }, expected: nil, }, - /*{ - description: "fails to report usage", + { + description: "[with_billing] succeeds but device status isn't accepted", uid: models.UID("uid"), tenant: "tenant", requiredMocks: func() { - device := &models.Device{ - UID: "uid", - TenantID: "tenant", - CreatedAt: time.Time{}, - } - - namespaceBilling := &models.Namespace{ - Name: "namespace1", - MaxDevices: -1, - Billing: &models.Billing{ - Active: true, - }, - } - mock.On("DeviceGetByUID", ctx, models.UID(device.UID), "tenant"). - Return(device, nil).Once() - mock.On("NamespaceGet", ctx, "tenant"). 
- Return(namespaceBilling, nil).Once() - envMock.On("Get", "SHELLHUB_CLOUD").Return("false").Once() - clockMock.On("Now").Return(now).Twice() - envMock.On("Get", "SHELLHUB_BILLING").Return(strconv.FormatBool(true)).Once() - clientMock.On("ReportUsage", &models.UsageRecord{ - Device: device, - Namespace: namespaceBilling, - Timestamp: now.Unix(), - }).Return(500, nil).Once() - }, - expected: ErrReport, - }, - { - description: "reports usage with success", + device := &models.Device{UID: "uid", Status: models.DeviceStatusPending, TenantID: "tenant", CreatedAt: time.Time{}} + + queryOptionsMock. + On("InNamespace", "tenant"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "uid", mock.AnythingOfType("store.QueryOption")). + Return(device, nil). + Once() + storeMock. + On("DeviceDelete", ctx, device). + Return(nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "tenant", models.DeviceStatusPending, int64(-1)). + Return(nil). + Once() + }, + expected: nil, + }, + { + description: "[with_billing] succeeds and soft-deletes device even when billing is active", uid: models.UID("uid"), tenant: "tenant", requiredMocks: func() { - device := &models.Device{ - UID: "uid", - TenantID: "tenant", - CreatedAt: time.Time{}, - } - - namespaceBilling := &models.Namespace{ - Name: "namespace1", - Members: []models.Member{{ID: "id", Role: guard.RoleOwner}, {ID: "id2", Role: guard.RoleObserver}}, - Billing: &models.Billing{ - Active: true, - }, - } - - mock.On("DeviceGetByUID", ctx, models.UID(device.UID), "tenant"). - Return(device, nil).Once() - mock.On("NamespaceGet", ctx, "tenant"). 
- Return(namespaceBilling, nil).Once() - envMock.On("Get", "SHELLHUB_CLOUD").Return("false").Once() - clockMock.On("Now").Return(now).Twice() - envMock.On("Get", "SHELLHUB_BILLING").Return(strconv.FormatBool(true)).Once() - clientMock.On("ReportUsage", &models.UsageRecord{ - Device: device, - Namespace: namespaceBilling, - Timestamp: now.Unix(), - }).Return(200, nil).Once() - mock.On("DeviceDelete", ctx, models.UID(device.UID)). - Return(nil).Once() + device := &models.Device{UID: "uid", Status: models.DeviceStatusAccepted, TenantID: "tenant", CreatedAt: time.Time{}} + + queryOptionsMock. + On("InNamespace", "tenant"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "uid", mock.AnythingOfType("store.QueryOption")). + Return(device, nil). + Once() + + expectedDevice := *device + expectedDevice.Status = models.DeviceStatusRemoved + expectedDevice.RemovedAt = &now + + storeMock. + On("DeviceUpdate", ctx, &expectedDevice). + Return(nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "tenant", models.DeviceStatusRemoved, int64(1)). + Return(nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "tenant", models.DeviceStatusAccepted, int64(-1)). + Return(nil). 
+ Once() }, expected: nil, - },*/ + }, } for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { tc.requiredMocks() - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) + service := NewService(store.Store(storeMock), privateKey, publicKey, storecache.NewNullCache(), clientMock) err := service.DeleteDevice(ctx, tc.uid, tc.tenant) assert.Equal(t, tc.expected, err) }) } - mock.AssertExpectations(t) + storeMock.AssertExpectations(t) } func TestRenameDevice(t *testing.T) { - mock := new(mocks.Store) - - ctx := context.TODO() + storeMock := new(storemock.Store) + queryOptionsMock := new(storemock.QueryOptions) + storeMock.On("Options").Return(queryOptionsMock) cases := []struct { - description string - requiredMocks func(device *models.Device) + name string uid models.UID - device *models.Device deviceNewName string tenant string + mocks func(context.Context) expected error }{ { - description: "fails when store device get fails", - tenant: "tenant", - uid: models.UID("uid"), - device: &models.Device{UID: "uid", Name: "name", TenantID: "tenant", Identity: &models.DeviceIdentity{MAC: "00:00:00:00:00:00"}, Status: "accepted"}, - requiredMocks: func(device *models.Device) { - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "tenant").Return(device, errors.New("error", "", 0)).Once() + name: "fails when store device resolve fails", + uid: models.UID("uid"), + deviceNewName: "newname", + tenant: "tenant", + mocks: func(ctx context.Context) { + queryOptionsMock. + On("InNamespace", "tenant"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "uid", mock.AnythingOfType("store.QueryOption")). + Return(nil, errors.New("error", "", 0)). 
+ Once() }, expected: NewErrDeviceNotFound(models.UID("uid"), errors.New("error", "", 0)), }, { - description: "returns nil if the name is the same", - tenant: "tenant", - deviceNewName: "name", + name: "succeeds when name is the same", uid: models.UID("uid"), - device: &models.Device{UID: "uid", Name: "name", TenantID: "tenant", Identity: &models.DeviceIdentity{MAC: "00:00:00:00:00:00"}, Status: "accepted"}, - requiredMocks: func(device *models.Device) { - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "tenant").Return(device, nil).Once() + deviceNewName: "name", + tenant: "tenant", + mocks: func(ctx context.Context) { + device := &models.Device{UID: "uid", Name: "name", TenantID: "tenant", Identity: &models.DeviceIdentity{MAC: "00:00:00:00:00:00"}, Status: "accepted"} + queryOptionsMock. + On("InNamespace", "tenant"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "uid", mock.AnythingOfType("store.QueryOption")). + Return(device, nil). + Once() }, expected: nil, }, { - description: "fails when store get by device name fails", - tenant: "tenant", - deviceNewName: "newname", + name: "succeeds when name is the same but different case", uid: models.UID("uid"), - device: &models.Device{UID: "uid", Name: "name", TenantID: "tenant", Identity: &models.DeviceIdentity{MAC: "00:00:00:00:00:00"}, Status: "accepted"}, - requiredMocks: func(device *models.Device) { - device2 := &models.Device{ - UID: "uid2", - Name: "newname", - TenantID: "tenant2", - } - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "tenant").Return(device, nil).Once() - mock.On("DeviceGetByName", ctx, "newname", "tenant", models.DeviceStatusAccepted).Return(device2, errors.New("error", "", 0)).Once() + deviceNewName: "NAME", + tenant: "tenant", + mocks: func(ctx context.Context) { + device := &models.Device{UID: "uid", Name: "name", TenantID: "tenant", Identity: &models.DeviceIdentity{MAC: "00:00:00:00:00:00"}, Status: "accepted"} + queryOptionsMock. 
+ On("InNamespace", "tenant"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "uid", mock.AnythingOfType("store.QueryOption")). + Return(device, nil). + Once() }, - expected: NewErrDeviceNotFound(models.UID("uid"), errors.New("error", "", 0)), + expected: nil, }, { - description: "fails when the name already exists", - tenant: "tenant", - deviceNewName: "newname", + name: "fails when device update fails", uid: models.UID("uid"), - device: &models.Device{UID: "uid", Name: "name", TenantID: "tenant", Identity: &models.DeviceIdentity{MAC: "00:00:00:00:00:00"}, Status: "accepted"}, - requiredMocks: func(device *models.Device) { - device2 := &models.Device{ - UID: "uid2", - Name: "newname", - TenantID: "tenant2", - } - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "tenant").Return(device, nil).Once() - mock.On("DeviceGetByName", ctx, "newname", "tenant", models.DeviceStatusAccepted).Return(device2, nil).Once() + deviceNewName: "newname", + tenant: "tenant", + mocks: func(ctx context.Context) { + device := &models.Device{UID: "uid", Name: "name", TenantID: "tenant", Identity: &models.DeviceIdentity{MAC: "00:00:00:00:00:00"}, Status: "accepted"} + updatedDevice := &models.Device{UID: "uid", Name: "newname", TenantID: "tenant", Identity: &models.DeviceIdentity{MAC: "00:00:00:00:00:00"}, Status: "accepted"} + + queryOptionsMock. + On("InNamespace", "tenant"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "uid", mock.AnythingOfType("store.QueryOption")). + Return(device, nil). + Once() + storeMock. + On("DeviceUpdate", ctx, updatedDevice). + Return(errors.New("error", "", 0)). 
+ Once() }, - expected: NewErrDeviceDuplicated("newname", nil), + expected: errors.New("error", "", 0), }, { - description: "fails when the store device rename fails", - tenant: "tenant", - deviceNewName: "anewname", + name: "succeeds", uid: models.UID("uid"), - device: &models.Device{UID: "uid", Name: "name", TenantID: "tenant", Identity: &models.DeviceIdentity{MAC: "00:00:00:00:00:00"}, Status: "accepted"}, - requiredMocks: func(device *models.Device) { - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "tenant").Return(device, nil).Once() - mock.On("DeviceGetByName", ctx, "anewname", "tenant", models.DeviceStatusAccepted).Return(nil, store.ErrNoDocuments).Once() - mock.On("DeviceRename", ctx, models.UID("uid"), "anewname").Return(errors.New("error", "", 0)).Once() + deviceNewName: "newname", + tenant: "tenant", + mocks: func(ctx context.Context) { + device := &models.Device{UID: "uid", Name: "name", TenantID: "tenant", Identity: &models.DeviceIdentity{MAC: "00:00:00:00:00:00"}, Status: "accepted"} + + queryOptionsMock. + On("InNamespace", "tenant"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "uid", mock.AnythingOfType("store.QueryOption")). + Return(device, nil). + Once() + + expectedDevice := *device + expectedDevice.Name = "newname" + + storeMock. + On("DeviceUpdate", ctx, &expectedDevice). + Return(nil). 
+ Once() }, - expected: errors.New("error", "", 0), + expected: nil, }, { - description: "succeeds", - tenant: "tenant", - deviceNewName: "anewname", + name: "succeeds and converts name to lowercase", uid: models.UID("uid"), - device: &models.Device{UID: "uid", Name: "name", TenantID: "tenant", Identity: &models.DeviceIdentity{MAC: "00:00:00:00:00:00"}, Status: "accepted"}, - requiredMocks: func(device *models.Device) { - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "tenant").Return(device, nil).Once() - mock.On("DeviceGetByName", ctx, "anewname", "tenant", models.DeviceStatusAccepted).Return(nil, store.ErrNoDocuments).Once() - mock.On("DeviceRename", ctx, models.UID("uid"), "anewname").Return(nil).Once() + deviceNewName: "NewName", + tenant: "tenant", + mocks: func(ctx context.Context) { + device := &models.Device{UID: "uid", Name: "oldname", TenantID: "tenant", Identity: &models.DeviceIdentity{MAC: "00:00:00:00:00:00"}, Status: "accepted"} + + queryOptionsMock. + On("InNamespace", "tenant"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "uid", mock.AnythingOfType("store.QueryOption")). + Return(device, nil). + Once() + + expectedDevice := *device + expectedDevice.Name = "newname" + + storeMock. + On("DeviceUpdate", ctx, &expectedDevice). + Return(nil). 
+ Once() }, expected: nil, }, } - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - tc.requiredMocks(tc.device) + s := NewService(store.Store(storeMock), privateKey, publicKey, storecache.NewNullCache(), clientMock) - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) - err := service.RenameDevice(ctx, tc.uid, tc.deviceNewName, tc.tenant) + for _, tc := range cases { + t.Run(tc.name, func(t *testing.T) { + ctx := context.Background() + tc.mocks(ctx) + err := s.RenameDevice(ctx, tc.uid, tc.deviceNewName, tc.tenant) assert.Equal(t, tc.expected, err) }) } - mock.AssertExpectations(t) + storeMock.AssertExpectations(t) } func TestLookupDevice(t *testing.T) { - mock := new(mocks.Store) + storeMock := new(storemock.Store) + queryOptionsMock := new(storemock.QueryOptions) + storeMock.On("Options").Return(queryOptionsMock) ctx := context.TODO() @@ -713,37 +1355,82 @@ func TestLookupDevice(t *testing.T) { expected Expected }{ { - description: "fails when store device lookup fails", + description: "fails when namespace does not exists", namespace: "namespace", device: &models.Device{UID: "uid", Name: "name", TenantID: "tenant", Identity: &models.DeviceIdentity{MAC: "00:00:00:00:00:00"}, Status: "accepted"}, - requiredMocks: func(device *models.Device, namespace string) { - mock.On("DeviceLookup", ctx, namespace, device.Name).Return(nil, errors.New("error", "", 0)).Once() + requiredMocks: func(_ *models.Device, namespace string) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceNameResolver, namespace). + Return(nil, errors.New("error", "", 0)). 
+ Once() }, expected: Expected{ nil, - NewErrDeviceLookupNotFound("namespace", "name", errors.New("error", "", 0)), + NewErrNamespaceNotFound("namespace", errors.New("error", "", 0)), }, }, { - description: "fails when the device is not found", + description: "fails when device is not found", namespace: "namespace", device: &models.Device{UID: "uid", Name: "name", TenantID: "tenant", Identity: &models.DeviceIdentity{MAC: "00:00:00:00:00:00"}, Status: "accepted"}, requiredMocks: func(device *models.Device, namespace string) { - mock.On("DeviceLookup", ctx, namespace, device.Name). - Return(nil, store.ErrNoDocuments).Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceNameResolver, namespace). + Return(&models.Namespace{TenantID: "00000000-0000-0000-0000-000000000000"}, nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + storeMock. + On( + "DeviceResolve", + ctx, + store.DeviceHostnameResolver, + "name", + mock.AnythingOfType("store.QueryOption"), + mock.AnythingOfType("store.QueryOption"), + ). + Return(nil, errors.New("error", "", 0)). + Once() }, expected: Expected{ nil, - NewErrDeviceLookupNotFound("namespace", "name", store.ErrNoDocuments), + NewErrDeviceNotFound(models.UID("name"), errors.New("error", "", 0)), }, }, { - description: "succeeds", + description: "succeeds to lookup for device", namespace: "namespace", device: &models.Device{UID: "uid", Name: "name", TenantID: "tenant", Identity: &models.DeviceIdentity{MAC: "00:00:00:00:00:00"}, Status: "accepted"}, requiredMocks: func(device *models.Device, namespace string) { - mock.On("DeviceLookup", ctx, namespace, device.Name). - Return(device, nil).Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceNameResolver, namespace). + Return(&models.Namespace{TenantID: "00000000-0000-0000-0000-000000000000"}, nil). 
+ Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + storeMock. + On( + "DeviceResolve", + ctx, + store.DeviceHostnameResolver, + "name", + mock.AnythingOfType("store.QueryOption"), + mock.AnythingOfType("store.QueryOption"), + ). + Return(device, nil). + Once() }, expected: Expected{ &models.Device{UID: "uid", Name: "name", TenantID: "tenant", Identity: &models.DeviceIdentity{MAC: "00:00:00:00:00:00"}, Status: "accepted"}, @@ -756,2109 +1443,1654 @@ func TestLookupDevice(t *testing.T) { t.Run(tc.description, func(t *testing.T) { tc.requiredMocks(tc.device, tc.namespace) - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) + service := NewService(store.Store(storeMock), privateKey, publicKey, storecache.NewNullCache(), clientMock) returnedDevice, err := service.LookupDevice(ctx, tc.namespace, tc.device.Name) assert.Equal(t, tc.expected, Expected{returnedDevice, err}) }) } - mock.AssertExpectations(t) + storeMock.AssertExpectations(t) } -func TestOffineDevice(t *testing.T) { - mock := new(mocks.Store) +func TestOfflineDevice(t *testing.T) { + now := time.Now() + clockMock := new(clockmock.Clock) + clockMock.On("Now").Return(now) + clock.DefaultBackend = clockMock - ctx := context.TODO() + storeMock := new(storemock.Store) cases := []struct { - name string - uid models.UID - online bool - requiredMocks func() - expected error + name string + uid models.UID + mocks func(context.Context) + expected error }{ { - name: "fails when store device online fails", + name: "fails when device does not exist", uid: models.UID("uid"), - requiredMocks: func() { - clockMock.On("Now").Return(now).Once() - mock.On("DeviceSetOnline", ctx, models.UID("uid"), now, false). - Return(errors.New("error", "", 0)).Once() + mocks: func(ctx context.Context) { + storeMock. 
+ On("DeviceResolve", ctx, store.DeviceUIDResolver, "uid"). + Return(nil, store.ErrNoDocuments). + Once() }, - expected: errors.New("error", "", 0), + expected: NewErrDeviceNotFound(models.UID("uid"), store.ErrNoDocuments), }, { - name: "succeeds", - uid: models.UID("uid"), - online: true, - requiredMocks: func() { - online := true - clockMock.On("Now").Return(now).Once() - mock.On("DeviceSetOnline", ctx, models.UID("uid"), now, online). - Return(errors.New("error", "", 0)).Once() + name: "fails when device resolve returns nil device", + uid: models.UID("uid"), + mocks: func(ctx context.Context) { + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "uid"). + Return(nil, nil). + Once() + }, + expected: NewErrDeviceNotFound(models.UID("uid"), nil), + }, + { + name: "fails when cannot update the device", + uid: models.UID("uid"), + mocks: func(ctx context.Context) { + device := &models.Device{UID: "uid"} + + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "uid"). + Return(device, nil). + Once() + + expectedDevice := *device + expectedDevice.DisconnectedAt = &now + + storeMock. + On("DeviceUpdate", ctx, &expectedDevice). + Return(errors.New("error", "", 0)). + Once() }, expected: errors.New("error", "", 0), }, + { + name: "fails when device update returns ErrNoDocuments", + uid: models.UID("uid"), + mocks: func(ctx context.Context) { + device := &models.Device{UID: "uid"} + + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "uid"). + Return(device, nil). + Once() + + expectedDevice := *device + expectedDevice.DisconnectedAt = &now + + storeMock. + On("DeviceUpdate", ctx, &expectedDevice). + Return(store.ErrNoDocuments). + Once() + }, + expected: NewErrDeviceNotFound(models.UID("uid"), store.ErrNoDocuments), + }, + { + name: "succeeds", + uid: models.UID("uid"), + mocks: func(ctx context.Context) { + device := &models.Device{UID: "uid"} + + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "uid"). 
+ Return(device, nil). + Once() + + expectedDevice := *device + expectedDevice.DisconnectedAt = &now + + storeMock. + On("DeviceUpdate", ctx, &expectedDevice). + Return(nil). + Once() + }, + expected: nil, + }, } + s := NewService(store.Store(storeMock), privateKey, publicKey, storecache.NewNullCache(), clientMock) + for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { - tc.requiredMocks() - - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) - err := service.OffineDevice(ctx, tc.uid, tc.online) - assert.Equal(t, tc.expected, err) + ctx := context.Background() + tc.mocks(ctx) + assert.Equal(t, tc.expected, s.OfflineDevice(ctx, tc.uid)) }) } - mock.AssertExpectations(t) + storeMock.AssertExpectations(t) } -func TestUpdateDeviceStatus_same_mac(t *testing.T) { - mock := new(mocks.Store) +func TestUpdateDeviceStatus(t *testing.T) { + now := time.Now() + clockMock := new(clockmock.Clock) + clockMock.On("Now").Return(now) + clock.DefaultBackend = clockMock - ctx := context.TODO() + storeMock := new(storemock.Store) + queryOptionsMock := new(storemock.QueryOptions) + storeMock.On("Options").Return(queryOptionsMock) + envMock := new(envsmocks.Backend) + envs.DefaultBackend = envMock + + ctx := context.Background() cases := []struct { description string - uid models.UID - status models.DeviceStatus - tenant string + req *requests.DeviceUpdateStatus requiredMocks func() - expected error + expectedError error }{ { - description: "fails when could not get the namespace", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(nil, errors.New("error", "", 0)).Once() + description: "failure - namespace not found", + req: &requests.DeviceUpdateStatus{ + TenantID: "invalid-tenant", + UID: "new-device", + Status: "accepted", + }, + requiredMocks: func() { + storeMock. 
+ On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "invalid-tenant"). + Return(nil, errors.New("namespace not found", "", 0)). + Once() + }, + expectedError: NewErrNamespaceNotFound("invalid-tenant", errors.New("namespace not found", "", 0)), + }, + { + description: "failure - device not found", + req: &requests.DeviceUpdateStatus{ + TenantID: "00000000-0000-0000-0000-000000000000", + UID: "nonexistent-device", + Status: "accepted", + }, + requiredMocks: func() { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-0000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-0000-0000-000000000000"}, nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "nonexistent-device", mock.AnythingOfType("store.QueryOption")). + Return(nil, errors.New("device not found", "", 0)). + Once() + }, + expectedError: NewErrDeviceNotFound(models.UID("nonexistent-device"), errors.New("device not found", "", 0)), + }, + { + description: "failure - device already accepted", + req: &requests.DeviceUpdateStatus{ + TenantID: "00000000-0000-0000-0000-000000000000", + UID: "accepted-device", + Status: "accepted", + }, + requiredMocks: func() { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-0000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-0000-0000-000000000000"}, nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "accepted-device", mock.AnythingOfType("store.QueryOption")). 
+ Return( + &models.Device{ + UID: "accepted-device", + Name: "test-device", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusAccepted, + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + }, + nil, + ). + Once() + }, + expectedError: NewErrDeviceStatusAccepted(nil), + }, + { + description: "success - same status", + req: &requests.DeviceUpdateStatus{ + TenantID: "00000000-0000-0000-0000-000000000000", + UID: "new-device", + Status: "pending", + }, + requiredMocks: func() { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-0000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-0000-0000-000000000000"}, nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "new-device", mock.AnythingOfType("store.QueryOption")). + Return( + &models.Device{ + UID: "accepted-device", + Name: "test-device", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusPending, + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + }, + nil, + ). + Once() }, - expected: NewErrNamespaceNotFound("00000000-0000-0000-0000-000000000000", errors.New("error", "", 0)), + expectedError: nil, }, { - description: "fails when could not get the devcie", - uid: models.UID("uid"), - tenant: "00000000-0000-0000-0000-000000000000", - status: "accepted", + description: "success (rejected) - status change to pending", + req: &requests.DeviceUpdateStatus{ + TenantID: "00000000-0000-0000-0000-000000000000", + UID: "device-to-pending", + Status: "pending", + }, requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). 
- Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() + device := &models.Device{ + UID: "device-to-pending", + Name: "test-device", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusRejected, + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + } + updatedDevice := &models.Device{ + UID: "device-to-pending", + Name: "test-device", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusPending, + StatusUpdatedAt: now, + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + } - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(nil, errors.New("error", "", 0)).Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-0000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-0000-0000-000000000000"}, nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "device-to-pending", mock.AnythingOfType("store.QueryOption")). + Return(device, nil). + Once() + storeMock. + On("DeviceUpdate", ctx, updatedDevice). + Return(nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "00000000-0000-0000-0000-000000000000", models.DeviceStatusRejected, int64(-1)). + Return(nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "00000000-0000-0000-0000-000000000000", models.DeviceStatusPending, int64(1)). + Return(nil). 
+ Once() + }, + expectedError: nil, + }, + { + description: "success (pending) - status change to rejected", + req: &requests.DeviceUpdateStatus{ + TenantID: "00000000-0000-0000-0000-000000000000", + UID: "device-to-reject", + Status: "rejected", }, - expected: NewErrDeviceNotFound("uid", errors.New("error", "", 0)), - }, - { - description: "fails when device already accepted", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() + device := &models.Device{ + UID: "device-to-reject", + Name: "test-device", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusPending, + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + } + updatedDevice := &models.Device{ + UID: "device-to-reject", + Name: "test-device", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusRejected, + StatusUpdatedAt: now, + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + } - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "accepted", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-0000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-0000-0000-000000000000"}, nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "device-to-reject", mock.AnythingOfType("store.QueryOption")). + Return(device, nil). + Once() + storeMock. 
+ On("DeviceUpdate", ctx, updatedDevice). + Return(nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "00000000-0000-0000-0000-000000000000", models.DeviceStatusPending, int64(-1)). + Return(nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "00000000-0000-0000-0000-000000000000", models.DeviceStatusRejected, int64(1)). + Return(nil). + Once() + }, + expectedError: nil, + }, + { + description: "failure (accepted) (same MAC) - hostname conflict", + req: &requests.DeviceUpdateStatus{ + TenantID: "00000000-0000-0000-0000-000000000000", + UID: "conflicting-device", + Status: "accepted", + }, + requiredMocks: func() { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-0000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-0000-0000-000000000000"}, nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "conflicting-device", mock.AnythingOfType("store.QueryOption")). + Return( + &models.Device{ + UID: "conflicting-device", + Name: "device-name", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusPending, + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + }, + nil, + ). + Once() + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceMACResolver, "aa:bb:cc:dd:ee:ff", mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return( + &models.Device{ + UID: "old-device", + Name: "device-name", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusAccepted, + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + }, + nil, + ). 
+ Once() + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceHostnameResolver, "device-name", mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return( + &models.Device{ + UID: "old-device", + Name: "device-name", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusAccepted, + Identity: &models.DeviceIdentity{MAC: "00:11:22:33:44:55"}, + }, + nil, + ). + Once() }, - expected: NewErrDeviceStatusAccepted(nil), + expectedError: NewErrDeviceDuplicated("device-name", nil), }, { - description: "fails when could not get the device by MAC", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). 
- Return(nil, errors.New("error", "", 0)).Once() + description: "success (accepted) (same MAC) - device merge", + req: &requests.DeviceUpdateStatus{ + TenantID: "00000000-0000-0000-0000-000000000000", + UID: "new-device", + Status: "accepted", }, - expected: NewErrDeviceNotFound(models.UID("uid"), errors.New("error", "", 0)), - }, - { - description: "fails when already exist a device with same name and a different mac", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). - Return(&models.Device{ - UID: "uid", - Name: "name", - Identity: &models.DeviceIdentity{MAC: "mac"}, - }, nil).Once() - - mock.On("DeviceGetByName", ctx, "name", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). 
- Return(&models.Device{ - UID: "notsameuid", - Name: "name", - Identity: &models.DeviceIdentity{MAC: "anothermac"}, - }, nil).Once() + newDevice := &models.Device{ + UID: "new-device", + Name: "device-name", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusPending, + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + } + oldDevice := &models.Device{ + UID: "old-device", + Name: "device-name", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusAccepted, + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + } + mergedDevice := &models.Device{ + UID: "new-device", + Name: "device-name", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusPending, + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + } + finalDevice := &models.Device{ + UID: "new-device", + Name: "device-name", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusAccepted, + StatusUpdatedAt: now, + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + } + + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-0000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-0000-0000-000000000000"}, nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "new-device", mock.AnythingOfType("store.QueryOption")). + Return(newDevice, nil). + Once() + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceMACResolver, "aa:bb:cc:dd:ee:ff", mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return(oldDevice, nil). + Once() + queryOptionsMock. 
+ On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceHostnameResolver, "device-name", mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return(oldDevice, nil). + Once() + // Merge operations + storeMock. + On("TunnelUpdateDeviceUID", ctx, "00000000-0000-0000-0000-000000000000", "old-device", "new-device"). + Return(nil). + Once() + storeMock. + On("SessionUpdateDeviceUID", ctx, models.UID("old-device"), models.UID("new-device")). + Return(nil). + Once() + storeMock. + On("DeviceUpdate", ctx, mergedDevice). + Return(nil). + Once() + storeMock. + On("DeviceDelete", ctx, oldDevice). + Return(nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted, int64(-1)). + Return(nil). + Once() + // Final status update + storeMock. + On("DeviceUpdate", ctx, finalDevice). + Return(nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "00000000-0000-0000-0000-000000000000", models.DeviceStatusPending, int64(-1)). + Return(nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted, int64(1)). + Return(nil). + Once() + }, + expectedError: nil, + }, + { + description: "failure (accepted) (different MAC) - hostname conflict", + req: &requests.DeviceUpdateStatus{ + TenantID: "00000000-0000-0000-0000-000000000000", + UID: "conflicting-device", + Status: "accepted", + }, + requiredMocks: func() { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-0000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-0000-0000-000000000000"}, nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). 
+ Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "conflicting-device", mock.AnythingOfType("store.QueryOption")). + Return( + &models.Device{ + UID: "conflicting-device", + Name: "duplicate-name", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusPending, + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + }, + nil, + ). + Once() + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceMACResolver, "aa:bb:cc:dd:ee:ff", mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return(nil, store.ErrNoDocuments). + Once() + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceHostnameResolver, "duplicate-name", mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return( + &models.Device{ + UID: "existing-device", + Name: "duplicate-name", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusAccepted, + Identity: &models.DeviceIdentity{MAC: "11:22:33:44:55:66"}, + }, + nil, + ). + Once() }, - expected: NewErrDeviceDuplicated("name", nil), + expectedError: NewErrDeviceDuplicated("duplicate-name", nil), }, { - description: "fails to update device UID", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). 
- Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). - Return(&models.Device{ - UID: "notsameuid", - Name: "name", - Identity: &models.DeviceIdentity{MAC: "mac"}, - }, nil).Once() - - mock.On("DeviceGetByName", ctx, "name", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). - Return(&models.Device{ - UID: "notsameuid", - Name: "name", - Identity: &models.DeviceIdentity{MAC: "mac"}, - }, nil).Once() - - mock.On("SessionUpdateDeviceUID", ctx, models.UID("notsameuid"), models.UID("uid")). - Return(errors.New("error", "", 0)).Once() + description: "failure (accepted) (different MAC) - device limit reached [community]", + req: &requests.DeviceUpdateStatus{ + TenantID: "00000000-0000-0000-0000-000000000000", + UID: "limit-device", + Status: "accepted", }, - expected: errors.New("error", "", 0), - }, - { - description: "fails to update device to the old name", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). 
- Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). - Return(&models.Device{ - UID: "notsameuid", - Name: "name", - Identity: &models.DeviceIdentity{MAC: "mac"}, - }, nil).Once() - - mock.On("DeviceGetByName", ctx, "name", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). - Return(&models.Device{ - UID: "notsameuid", - Name: "name", - Identity: &models.DeviceIdentity{MAC: "mac"}, - }, nil).Once() - - mock.On("SessionUpdateDeviceUID", ctx, models.UID("notsameuid"), models.UID("uid")). - Return(nil).Once() - - mock.On("DeviceRename", ctx, models.UID("uid"), "name"). - Return(errors.New("error", "", 0)).Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-0000-0000-000000000000"). + Return( + &models.Namespace{ + TenantID: "00000000-0000-0000-0000-000000000000", + MaxDevices: 3, + DevicesAcceptedCount: 3, + }, + nil, + ). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "limit-device", mock.AnythingOfType("store.QueryOption")). + Return( + &models.Device{ + UID: "limit-device", + Name: "test-device", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusPending, + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + }, + nil, + ). + Once() + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. 
+ On("DeviceResolve", ctx, store.DeviceMACResolver, "aa:bb:cc:dd:ee:ff", mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return(nil, store.ErrNoDocuments). + Once() + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceHostnameResolver, "test-device", mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return(nil, store.ErrNoDocuments). + Once() + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("false"). + Once() + }, + expectedError: NewErrDeviceMaxDevicesReached(3), + }, + { + description: "success (accepted) (different MAC) - device acceptance [community]", + req: &requests.DeviceUpdateStatus{ + TenantID: "00000000-0000-0000-0000-000000000000", + UID: "pending-device", + Status: "accepted", }, - expected: errors.New("error", "", 0), - }, - { - description: "fails to delete device with the same mac", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). 
- Return(&models.Device{ - UID: "notsameuid", - Name: "name", - Identity: &models.DeviceIdentity{MAC: "mac"}, - }, nil).Once() - - mock.On("DeviceGetByName", ctx, "name", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). - Return(&models.Device{ - UID: "notsameuid", - Name: "name", - Identity: &models.DeviceIdentity{MAC: "mac"}, - }, nil).Once() - - mock.On("SessionUpdateDeviceUID", ctx, models.UID("notsameuid"), models.UID("uid")). - Return(nil).Once() - - mock.On("DeviceRename", ctx, models.UID("uid"), "name"). - Return(nil).Once() - - mock.On("DeviceDelete", ctx, models.UID("notsameuid")). - Return(errors.New("error", "", 0)).Once() + device := &models.Device{ + UID: "pending-device", + Name: "test-device", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusPending, + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + } + updatedDevice := &models.Device{ + UID: "pending-device", + Name: "test-device", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusAccepted, + StatusUpdatedAt: now, + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + } + + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-0000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-0000-0000-000000000000"}, nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "pending-device", mock.AnythingOfType("store.QueryOption")). + Return(device, nil). + Once() + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. 
+ On("DeviceResolve", ctx, store.DeviceMACResolver, "aa:bb:cc:dd:ee:ff", mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return(nil, store.ErrNoDocuments). + Once() + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceHostnameResolver, "test-device", mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return(nil, store.ErrNoDocuments). + Once() + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("false"). + Once() + storeMock. + On("DeviceUpdate", ctx, updatedDevice). + Return(nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "00000000-0000-0000-0000-000000000000", models.DeviceStatusPending, int64(-1)). + Return(nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted, int64(1)). + Return(nil). + Once() + }, + expectedError: nil, + }, + { + description: "failure (accepted) (different MAC) - device limit reached [enterprise]", + req: &requests.DeviceUpdateStatus{ + TenantID: "00000000-0000-0000-0000-000000000000", + UID: "limit-device", + Status: "accepted", + }, + requiredMocks: func() { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-0000-0000-000000000000"). + Return( + &models.Namespace{ + TenantID: "00000000-0000-0000-0000-000000000000", + MaxDevices: 3, + DevicesAcceptedCount: 3, + }, + nil, + ). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "limit-device", mock.AnythingOfType("store.QueryOption")). 
+ Return( + &models.Device{ + UID: "limit-device", + Name: "test-device", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusPending, + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + }, + nil, + ). + Once() + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceMACResolver, "aa:bb:cc:dd:ee:ff", mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return(nil, store.ErrNoDocuments). + Once() + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceHostnameResolver, "test-device", mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return(nil, store.ErrNoDocuments). + Once() + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("false"). + Once() + }, + expectedError: NewErrDeviceMaxDevicesReached(3), + }, + { + description: "success (accepted) (different MAC) - device acceptance [enterprise]", + req: &requests.DeviceUpdateStatus{ + TenantID: "00000000-0000-0000-0000-000000000000", + UID: "pending-device", + Status: "accepted", }, - expected: errors.New("error", "", 0), - }, - { - description: "fails to update device status", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). 
- Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). - Return(&models.Device{ - UID: "notsameuid", - Name: "name", - Identity: &models.DeviceIdentity{MAC: "mac"}, - }, nil).Once() - - mock.On("DeviceGetByName", ctx, "name", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). - Return(&models.Device{ - UID: "notsameuid", - Name: "name", - Identity: &models.DeviceIdentity{MAC: "mac"}, - }, nil).Once() - - mock.On("SessionUpdateDeviceUID", ctx, models.UID("notsameuid"), models.UID("uid")). - Return(nil).Once() - - mock.On("DeviceRename", ctx, models.UID("uid"), "name"). - Return(nil).Once() - - mock.On("DeviceDelete", ctx, models.UID("notsameuid")). - Return(nil).Once() - - mock.On("DeviceUpdateStatus", ctx, models.UID("uid"), models.DeviceStatus("accepted")). - Return(errors.New("error", "", 0)).Once() + device := &models.Device{ + UID: "pending-device", + Name: "test-device", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusPending, + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + } + updatedDevice := &models.Device{ + UID: "pending-device", + Name: "test-device", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusAccepted, + StatusUpdatedAt: now, + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + } + + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-0000-0000-000000000000"). + Return(&models.Namespace{TenantID: "00000000-0000-0000-0000-000000000000"}, nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. 
+ On("DeviceResolve", ctx, store.DeviceUIDResolver, "pending-device", mock.AnythingOfType("store.QueryOption")). + Return(device, nil). + Once() + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceMACResolver, "aa:bb:cc:dd:ee:ff", mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return(nil, store.ErrNoDocuments). + Once() + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceHostnameResolver, "test-device", mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return(nil, store.ErrNoDocuments). + Once() + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("false"). + Once() + storeMock. + On("DeviceUpdate", ctx, updatedDevice). + Return(nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "00000000-0000-0000-0000-000000000000", models.DeviceStatusPending, int64(-1)). + Return(nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted, int64(1)). + Return(nil). + Once() + }, + expectedError: nil, + }, + { + description: "failure (accepted) (different MAC) (billing inactive) (removed device) - billing evaluate [cloud]", + req: &requests.DeviceUpdateStatus{ + TenantID: "00000000-0000-0000-0000-000000000000", + UID: "removed-device", + Status: "accepted", + }, + requiredMocks: func() { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-0000-0000-000000000000"). 
+ Return( + &models.Namespace{ + TenantID: "00000000-0000-0000-0000-000000000000", + Billing: &models.Billing{Active: false}, + }, + nil, + ). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "removed-device", mock.AnythingOfType("store.QueryOption")). + Return( + &models.Device{ + UID: "removed-device", + RemovedAt: &now, + Name: "test-device", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusPending, + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + }, + nil, + ). + Once() + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceMACResolver, "aa:bb:cc:dd:ee:ff", mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return(nil, store.ErrNoDocuments). + Once() + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceHostnameResolver, "test-device", mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return(nil, store.ErrNoDocuments). + Once() + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("true"). + Once() + clientMock. + On("BillingEvaluate", mock.Anything, "00000000-0000-0000-0000-000000000000"). + Return(&models.BillingEvaluation{CanAccept: false}, errors.New("error", "store", 0)). 
+ Once() + }, + expectedError: NewErrBillingEvaluate(errors.New("evaluate error", "service", 4)), + }, + { + description: "failure (accepted) (different MAC) (billing inactive) - device limit reached without billing [cloud]", + req: &requests.DeviceUpdateStatus{ + TenantID: "00000000-0000-0000-0000-000000000000", + UID: "limit-device", + Status: "accepted", + }, + requiredMocks: func() { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-0000-0000-000000000000"). + Return( + &models.Namespace{ + TenantID: "00000000-0000-0000-0000-000000000000", + MaxDevices: 3, + DevicesAcceptedCount: 3, + Billing: &models.Billing{Active: false}, + }, + nil, + ). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "limit-device", mock.AnythingOfType("store.QueryOption")). + Return( + &models.Device{ + UID: "limit-device", + Name: "test-device", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusPending, + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + }, + nil, + ). + Once() + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceMACResolver, "aa:bb:cc:dd:ee:ff", mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return(nil, store.ErrNoDocuments). + Once() + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceHostnameResolver, "test-device", mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). 
+ Return(nil, store.ErrNoDocuments). + Once() + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("true"). + Once() + }, + expectedError: NewErrDeviceLimit(3, nil), + }, + { + description: "failure (accepted) (different MAC) (billing inactive) (removed device) - can't accept [cloud]", + req: &requests.DeviceUpdateStatus{ + TenantID: "00000000-0000-0000-0000-000000000000", + UID: "removed-device", + Status: "accepted", + }, + requiredMocks: func() { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-0000-0000-000000000000"). + Return( + &models.Namespace{ + TenantID: "00000000-0000-0000-0000-000000000000", + Billing: &models.Billing{Active: false}, + }, + nil, + ). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "removed-device", mock.AnythingOfType("store.QueryOption")). + Return( + &models.Device{ + UID: "removed-device", + RemovedAt: &now, + Name: "test-device", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusPending, + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + }, + nil, + ). + Once() + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceMACResolver, "aa:bb:cc:dd:ee:ff", mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return(nil, store.ErrNoDocuments). + Once() + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. 
+ On("DeviceResolve", ctx, store.DeviceHostnameResolver, "test-device", mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return(nil, store.ErrNoDocuments). + Once() + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("true"). + Once() + clientMock. + On("BillingEvaluate", mock.Anything, "00000000-0000-0000-0000-000000000000"). + Return(&models.BillingEvaluation{CanAccept: false}, nil). + Once() + }, + expectedError: ErrDeviceLimit, + }, + { + description: "success (accepted) (different MAC) (billing inactive) (removed device) - [cloud]", + req: &requests.DeviceUpdateStatus{ + TenantID: "00000000-0000-0000-0000-000000000000", + UID: "removed-device", + Status: "accepted", }, - expected: errors.New("error", "", 0), - }, - { - description: "success to update device status", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). - Return(&models.Device{ - UID: "notsameuid", - Name: "name", - Identity: &models.DeviceIdentity{MAC: "mac"}, - }, nil).Once() - - mock.On("DeviceGetByName", ctx, "name", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). - Return(&models.Device{ - UID: "notsameuid", - Name: "name", - Identity: &models.DeviceIdentity{MAC: "mac"}, - }, nil).Once() - - mock.On("SessionUpdateDeviceUID", ctx, models.UID("notsameuid"), models.UID("uid")). 
- Return(nil).Once() - - mock.On("DeviceRename", ctx, models.UID("uid"), "name"). - Return(nil).Once() - - mock.On("DeviceDelete", ctx, models.UID("notsameuid")). - Return(nil).Once() - - mock.On("DeviceUpdateStatus", ctx, models.UID("uid"), models.DeviceStatus("accepted")). - Return(nil).Once() + device := &models.Device{ + UID: "removed-device", + RemovedAt: &now, + Name: "test-device", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusPending, + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + } + updatedDevice := &models.Device{ + UID: "removed-device", + RemovedAt: &now, + Name: "test-device", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusAccepted, + StatusUpdatedAt: now, + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + } + + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-0000-0000-000000000000"). + Return( + &models.Namespace{ + TenantID: "00000000-0000-0000-0000-000000000000", + Billing: &models.Billing{Active: false}, + }, + nil, + ). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "removed-device", mock.AnythingOfType("store.QueryOption")). + Return(device, nil). + Once() + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceMACResolver, "aa:bb:cc:dd:ee:ff", mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return(nil, store.ErrNoDocuments). + Once() + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). 
+ Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceHostnameResolver, "test-device", mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return(nil, store.ErrNoDocuments). + Once() + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("true"). + Once() + clientMock. + On("BillingEvaluate", mock.Anything, "00000000-0000-0000-0000-000000000000"). + Return(&models.BillingEvaluation{CanAccept: true}, nil). + Once() + storeMock. + On("DeviceUpdate", ctx, updatedDevice). + Return(nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "00000000-0000-0000-0000-000000000000", models.DeviceStatusPending, int64(-1)). + Return(nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted, int64(1)). + Return(nil). + Once() + }, + expectedError: nil, + }, + { + description: "failure (accepted) (different MAC) (billing active) - billing report error [cloud]", + req: &requests.DeviceUpdateStatus{ + TenantID: "00000000-0000-0000-0000-000000000000", + UID: "billing-error-device", + Status: "accepted", + }, + requiredMocks: func() { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-0000-0000-000000000000"). + Return( + &models.Namespace{ + TenantID: "00000000-0000-0000-0000-000000000000", + Billing: &models.Billing{Active: true}, + }, + nil, + ). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "billing-error-device", mock.AnythingOfType("store.QueryOption")). + Return( + &models.Device{ + UID: "billing-error-device", + Name: "test-device", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusPending, + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + }, + nil, + ). + Once() + queryOptionsMock. 
+ On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceMACResolver, "aa:bb:cc:dd:ee:ff", mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return(nil, store.ErrNoDocuments). + Once() + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceHostnameResolver, "test-device", mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return(nil, store.ErrNoDocuments). + Once() + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("true"). + Once() + clientMock. + On("BillingReport", mock.Anything, "00000000-0000-0000-0000-000000000000", ReportDeviceAccept). + Return(errors.New("billing error", "", 0)). + Once() + }, + expectedError: NewErrBillingReportNamespaceDelete(ErrReport), + }, + { + description: "failure (accepted) (different MAC) (billing active) - payment required [cloud]", + req: &requests.DeviceUpdateStatus{ + TenantID: "00000000-0000-0000-0000-000000000000", + UID: "payment-required-device", + Status: "accepted", + }, + requiredMocks: func() { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-0000-0000-000000000000"). + Return( + &models.Namespace{ + TenantID: "00000000-0000-0000-0000-000000000000", + Billing: &models.Billing{Active: true}, + }, + nil, + ). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "payment-required-device", mock.AnythingOfType("store.QueryOption")). 
+ Return( + &models.Device{ + UID: "payment-required-device", + Name: "test-device", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusPending, + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + }, + nil, + ). + Once() + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceMACResolver, "aa:bb:cc:dd:ee:ff", mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return(nil, store.ErrNoDocuments). + Once() + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceHostnameResolver, "test-device", mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return(nil, store.ErrNoDocuments). + Once() + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("true"). + Once() + clientMock. + On("BillingReport", mock.Anything, "00000000-0000-0000-0000-000000000000", ReportDeviceAccept). + Return(&req.Error{Code: 402, Message: ""}). 
+ Once() + }, + expectedError: NewErrBillingReportNamespaceDelete(ErrPaymentRequired), + }, + { + description: "success (accepted) (different MAC) (billing active) - device acceptance [cloud]", + req: &requests.DeviceUpdateStatus{ + TenantID: "00000000-0000-0000-0000-000000000000", + UID: "cloud-device", + Status: "accepted", }, - expected: nil, + requiredMocks: func() { + device := &models.Device{ + UID: "cloud-device", + Name: "test-device", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusPending, + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + } + updatedDevice := &models.Device{ + UID: "cloud-device", + Name: "test-device", + TenantID: "00000000-0000-0000-0000-000000000000", + Status: models.DeviceStatusAccepted, + StatusUpdatedAt: now, + Identity: &models.DeviceIdentity{MAC: "aa:bb:cc:dd:ee:ff"}, + } + + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-0000-0000-000000000000"). + Return( + &models.Namespace{ + TenantID: "00000000-0000-0000-0000-000000000000", + Billing: &models.Billing{Active: true}, + }, + nil, + ). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "cloud-device", mock.AnythingOfType("store.QueryOption")). + Return(device, nil). + Once() + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceMACResolver, "aa:bb:cc:dd:ee:ff", mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return(nil, store.ErrNoDocuments). + Once() + queryOptionsMock. + On("WithDeviceStatus", models.DeviceStatusAccepted). + Return(nil). + Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). 
+ Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceHostnameResolver, "test-device", mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return(nil, store.ErrNoDocuments). + Once() + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("true"). + Once() + clientMock. + On("BillingReport", mock.Anything, "00000000-0000-0000-0000-000000000000", ReportDeviceAccept). + Return(nil). + Once() + storeMock. + On("DeviceUpdate", ctx, updatedDevice). + Return(nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "00000000-0000-0000-0000-000000000000", models.DeviceStatusPending, int64(-1)). + Return(nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted, int64(1)). + Return(nil). + Once() + }, + expectedError: nil, }, } + service := NewService(storeMock, privateKey, publicKey, storecache.NewNullCache(), clientMock) + + storeMock. + On("WithTransaction", ctx, mock.AnythingOfType("store.TransactionCb")). + Return(func(ctx context.Context, cb store.TransactionCb) error { return cb(ctx) }). 
+ Times(len(cases)) + for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { tc.requiredMocks() - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) - err := service.UpdateDeviceStatus(ctx, tc.tenant, tc.uid, tc.status) - assert.Equal(t, tc.expected, err) + err := service.UpdateDeviceStatus(ctx, tc.req) + require.Equal(t, tc.expectedError, err) }) } - mock.AssertExpectations(t) + storeMock.AssertExpectations(t) + envMock.AssertExpectations(t) } -func TestUpdateDeviceStatus_community_and_enterprise(t *testing.T) { - mock := new(mocks.Store) - - ctx := context.TODO() +func TestDeviceUpdate(t *testing.T) { + now := time.Now() + storeMock := new(storemock.Store) + queryOptionsMock := new(storemock.QueryOptions) + storeMock.On("Options").Return(queryOptionsMock) cases := []struct { description string - uid models.UID - status models.DeviceStatus - tenant string - requiredMocks func() + req *requests.DeviceUpdate + requiredMocks func(ctx context.Context) expected error }{ { - description: "fails when could not get the namespace", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(nil, errors.New("error", "", 0)).Once() + description: "fails when could not get the device by UID", + req: &requests.DeviceUpdate{ + UID: "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", + TenantID: "00000000-0000-0000-0000-000000000000", + Name: "", }, - expected: NewErrNamespaceNotFound("00000000-0000-0000-0000-000000000000", errors.New("error", "", 0)), + requiredMocks: func(ctx context.Context) { + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. 
+ On("DeviceResolve", ctx, store.DeviceUIDResolver, "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", mock.AnythingOfType("store.QueryOption")). + Return(nil, errors.New("error", "", 0)). + Once() + }, + expected: NewErrDeviceNotFound(models.UID("d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e"), errors.New("error", "", 0)), }, { - description: "fails when could not get the devcie", - uid: models.UID("uid"), - tenant: "00000000-0000-0000-0000-000000000000", - status: "accepted", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(nil, errors.New("error", "", 0)).Once() + description: "fails when already exists a device with same name", + req: &requests.DeviceUpdate{ + UID: "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", + TenantID: "00000000-0000-0000-0000-000000000000", + Name: "name", + }, + requiredMocks: func(ctx context.Context) { + device := &models.Device{ + UID: "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", + Name: "oldname", + DisconnectedAt: &now, + } + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", mock.AnythingOfType("store.QueryOption")). + Return(device, nil). + Once() + storeMock. + On("DeviceConflicts", ctx, &models.DeviceConflicts{Name: "name"}). + Return([]string{"name"}, true, nil). 
+ Once() }, - expected: NewErrDeviceNotFound("uid", errors.New("error", "", 0)), + expected: NewErrDeviceDuplicated("name", nil), }, { - description: "fails when device already accepted", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "accepted", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() + description: "success when updating the device name to same name (case insensitive)", + req: &requests.DeviceUpdate{ + UID: "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", + TenantID: "00000000-0000-0000-0000-000000000000", + Name: "NAME", + }, + requiredMocks: func(ctx context.Context) { + device := &models.Device{ + UID: "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", + Name: "name", + DisconnectedAt: &now, + } + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", mock.AnythingOfType("store.QueryOption")). + Return(device, nil). + Once() + storeMock. + On("DeviceConflicts", ctx, &models.DeviceConflicts{Name: "NAME"}). + Return([]string{}, false, nil). + Once() + storeMock. + On("DeviceUpdate", ctx, device). + Return(nil). 
+ Once() }, - expected: NewErrDeviceStatusAccepted(nil), + expected: nil, }, { - description: "fails when could not get the device by MAC", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). - Return(nil, errors.New("error", "", 0)).Once() + description: "success when update device name", + req: &requests.DeviceUpdate{ + UID: "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", + TenantID: "00000000-0000-0000-0000-000000000000", + Name: "newname", }, - expected: NewErrDeviceNotFound(models.UID("uid"), errors.New("error", "", 0)), + requiredMocks: func(ctx context.Context) { + device := &models.Device{ + UID: "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", + Name: "oldname", + DisconnectedAt: &now, + } + updatedDevice := &models.Device{ + UID: "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", + Name: "newname", + DisconnectedAt: &now, + } + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", mock.AnythingOfType("store.QueryOption")). + Return(device, nil). + Once() + storeMock. + On("DeviceConflicts", ctx, &models.DeviceConflicts{Name: "newname"}). 
+ Return([]string{}, false, nil). + Once() + storeMock. + On("DeviceUpdate", ctx, updatedDevice). + Return(nil). + Once() + }, + expected: nil, }, { - description: "fails when already exist a device with same name", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). - Return(nil, store.ErrNoDocuments).Once() - - mock.On("DeviceGetByName", ctx, "name", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). - Return(&models.Device{ - UID: "fb2de504e98d3ccab342b53d83395cd7fda297c71e8da550c31478bae0dbb8c5", - Name: "name", - }, nil).Once() + description: "success when update device name with uppercase to lowercase", + req: &requests.DeviceUpdate{ + UID: "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", + TenantID: "00000000-0000-0000-0000-000000000000", + Name: "NewName", }, - expected: NewErrDeviceDuplicated("name", nil), + requiredMocks: func(ctx context.Context) { + device := &models.Device{ + UID: "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", + Name: "oldname", + DisconnectedAt: &now, + } + updatedDevice := &models.Device{ + UID: "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", + Name: "newname", + DisconnectedAt: &now, + } + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). 
+ Once() + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", mock.AnythingOfType("store.QueryOption")). + Return(device, nil). + Once() + storeMock. + On("DeviceConflicts", ctx, &models.DeviceConflicts{Name: "NewName"}). + Return([]string{}, false, nil). + Once() + storeMock. + On("DeviceUpdate", ctx, updatedDevice). + Return(nil). + Once() + }, + expected: nil, }, { - description: "fails namespace has reached the limit of devices in community instance", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - MaxDevices: 3, - DevicesCount: 3, - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). - Return(nil, store.ErrNoDocuments).Once() - - mock.On("DeviceGetByName", ctx, "name", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). 
- Return(nil, store.ErrNoDocuments).Once() - - envMock.On("Get", "SHELLHUB_CLOUD").Return("false").Once() - envMock.On("Get", "SHELLHUB_ENTERPRISE").Return("false").Once() + description: "success when name is empty", + req: &requests.DeviceUpdate{ + UID: "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", + TenantID: "00000000-0000-0000-0000-000000000000", + Name: "", }, - expected: NewErrDeviceMaxDevicesReached(3), - }, - { - description: "fails namespace has reached the limit of devices in enterprise instance", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - MaxDevices: 3, - DevicesCount: 3, - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). - Return(nil, store.ErrNoDocuments).Once() - - mock.On("DeviceGetByName", ctx, "name", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). - Return(nil, store.ErrNoDocuments).Once() - - envMock.On("Get", "SHELLHUB_CLOUD").Return("false").Once() - envMock.On("Get", "SHELLHUB_ENTERPRISE").Return("true").Twice() - }, - expected: NewErrDeviceMaxDevicesReached(3), - }, - { - description: "fails when could not update device status on database", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). 
- Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). - Return(nil, store.ErrNoDocuments).Once() - - mock.On("DeviceGetByName", ctx, "name", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). - Return(nil, store.ErrNoDocuments).Once() - - envMock.On("Get", "SHELLHUB_CLOUD").Return("false").Once() - envMock.On("Get", "SHELLHUB_ENTERPRISE").Return("false").Once() - - mock.On("DeviceUpdateStatus", ctx, models.UID("uid"), models.DeviceStatus("accepted")). - Return(errors.New("error", "", 0)).Once() - }, - expected: errors.New("error", "", 0), - }, - { - description: "success to update device status", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). - Return(nil, store.ErrNoDocuments).Once() - - mock.On("DeviceGetByName", ctx, "name", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). 
- Return(nil, store.ErrNoDocuments).Once() - - envMock.On("Get", "SHELLHUB_CLOUD").Return("false").Once() - envMock.On("Get", "SHELLHUB_ENTERPRISE").Return("false").Once() - - mock.On("DeviceUpdateStatus", ctx, models.UID("uid"), models.DeviceStatus("accepted")). - Return(nil).Once() - }, - expected: nil, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - tc.requiredMocks() - - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) - err := service.UpdateDeviceStatus(ctx, tc.tenant, tc.uid, tc.status) - assert.Equal(t, tc.expected, err) - }) - } - - mock.AssertExpectations(t) -} - -func TestUpdateDeviceStatus_cloud_subscription_active(t *testing.T) { - mock := new(mocks.Store) - - ctx := context.TODO() - - cases := []struct { - description string - uid models.UID - status models.DeviceStatus - tenant string - requiredMocks func() - expected error - }{ - { - description: "fails when could not get the namespace", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(nil, errors.New("error", "", 0)).Once() - }, - expected: NewErrNamespaceNotFound("00000000-0000-0000-0000-000000000000", errors.New("error", "", 0)), - }, - { - description: "fails when could not get the devcie", - uid: models.UID("uid"), - tenant: "00000000-0000-0000-0000-000000000000", - status: "accepted", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). 
- Return(nil, errors.New("error", "", 0)).Once() - }, - expected: NewErrDeviceNotFound("uid", errors.New("error", "", 0)), - }, - { - description: "fails when device already accepted", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "accepted", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - }, - expected: NewErrDeviceStatusAccepted(nil), - }, - { - description: "fails when could not get the device by MAC", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). 
- Return(nil, errors.New("error", "", 0)).Once() - }, - expected: NewErrDeviceNotFound(models.UID("uid"), errors.New("error", "", 0)), - }, - { - description: "fails when namespace has a subscription active and could not report the device accepted", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - Billing: &models.Billing{ - Active: true, - }, - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). - Return(nil, store.ErrNoDocuments).Once() - - mock.On("DeviceGetByName", ctx, "name", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). - Return(nil, store.ErrNoDocuments).Once() - - envMock.On("Get", "SHELLHUB_CLOUD").Return("true").Twice() - envMock.On("Get", "SHELLHUB_ENTERPRISE").Return("false").Once() - - clientMock.On("BillingReport", "00000000-0000-0000-0000-000000000000", "device_accept").Return(0, errors.New("error", "", 0)).Once() - }, - expected: NewErrBillingReportNamespaceDelete(errors.New("error", "", 0)), - }, - { - description: "fails when namespace has a subscription active and report block the action", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). 
- Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - Billing: &models.Billing{ - Active: true, - }, - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). - Return(nil, store.ErrNoDocuments).Once() - - mock.On("DeviceGetByName", ctx, "name", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). - Return(nil, store.ErrNoDocuments).Once() - - envMock.On("Get", "SHELLHUB_CLOUD").Return("true").Twice() - envMock.On("Get", "SHELLHUB_ENTERPRISE").Return("false").Once() - - clientMock.On("BillingReport", "00000000-0000-0000-0000-000000000000", "device_accept").Return(402, nil).Once() - }, - expected: NewErrBillingReportNamespaceDelete(ErrPaymentRequired), - }, - { - description: "fails when could not update device status on database", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - Billing: &models.Billing{ - Active: true, - }, - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). 
- Return(nil, store.ErrNoDocuments).Once() - - mock.On("DeviceGetByName", ctx, "name", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). - Return(nil, store.ErrNoDocuments).Once() - - envMock.On("Get", "SHELLHUB_CLOUD").Return("true").Twice() - envMock.On("Get", "SHELLHUB_ENTERPRISE").Return("false").Once() - - clientMock.On("BillingReport", "00000000-0000-0000-0000-000000000000", "device_accept").Return(200, nil).Once() - - mock.On("DeviceUpdateStatus", ctx, models.UID("uid"), models.DeviceStatus("accepted")). - Return(errors.New("error", "", 0)).Once() - }, - expected: errors.New("error", "", 0), - }, - { - description: "success to update device status", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - Billing: &models.Billing{ - Active: true, - }, - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). - Return(nil, store.ErrNoDocuments).Once() - - mock.On("DeviceGetByName", ctx, "name", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). - Return(nil, store.ErrNoDocuments).Once() - - envMock.On("Get", "SHELLHUB_CLOUD").Return("true").Twice() - envMock.On("Get", "SHELLHUB_ENTERPRISE").Return("false").Once() - - clientMock.On("BillingReport", "00000000-0000-0000-0000-000000000000", "device_accept").Return(200, nil).Once() - - mock.On("DeviceUpdateStatus", ctx, models.UID("uid"), models.DeviceStatus("accepted")). 
- Return(nil).Once() - }, - expected: nil, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - tc.requiredMocks() - - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) - err := service.UpdateDeviceStatus(ctx, tc.tenant, tc.uid, tc.status) - assert.Equal(t, tc.expected, err) - }) - } - - mock.AssertExpectations(t) -} - -func TestUpdateDeviceStatus_cloud_subscription_inactive(t *testing.T) { - mock := new(mocks.Store) - - ctx := context.TODO() - - cases := []struct { - description string - uid models.UID - status models.DeviceStatus - tenant string - requiredMocks func() - expected error - }{ - { - description: "fails when could not get the namespace", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(nil, errors.New("error", "", 0)).Once() - }, - expected: NewErrNamespaceNotFound("00000000-0000-0000-0000-000000000000", errors.New("error", "", 0)), - }, - { - description: "fails when could not get the devcie", - uid: models.UID("uid"), - tenant: "00000000-0000-0000-0000-000000000000", - status: "accepted", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(nil, errors.New("error", "", 0)).Once() - }, - expected: NewErrDeviceNotFound("uid", errors.New("error", "", 0)), - }, - { - description: "fails when device already accepted", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). 
- Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "accepted", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - }, - expected: NewErrDeviceStatusAccepted(nil), - }, - { - description: "fails when could not get the device by MAC", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). - Return(nil, errors.New("error", "", 0)).Once() - }, - expected: NewErrDeviceNotFound(models.UID("uid"), errors.New("error", "", 0)), - }, - { - description: "fails when could not check if device was removed recently", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - Billing: &models.Billing{ - Active: false, - }, - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). 
- Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). - Return(nil, store.ErrNoDocuments).Once() - - mock.On("DeviceGetByName", ctx, "name", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). - Return(nil, store.ErrNoDocuments).Once() - - envMock.On("Get", "SHELLHUB_CLOUD").Return("true").Twice() - envMock.On("Get", "SHELLHUB_ENTERPRISE").Return("false").Once() - - mock.On("DeviceRemovedGet", ctx, "00000000-0000-0000-0000-000000000000", models.UID("uid")). - Return(nil, errors.New("error", "", 0)).Once() - }, - expected: NewErrDeviceRemovedGet(errors.New("error", "", 0)), - }, - { - description: "fails when could not count how many devices were removed recently", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - Billing: &models.Billing{ - Active: false, - }, - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). - Return(nil, store.ErrNoDocuments).Once() - - mock.On("DeviceGetByName", ctx, "name", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). 
- Return(nil, store.ErrNoDocuments).Once() - - envMock.On("Get", "SHELLHUB_CLOUD").Return("true").Twice() - envMock.On("Get", "SHELLHUB_ENTERPRISE").Return("false").Once() - - mock.On("DeviceRemovedGet", ctx, "00000000-0000-0000-0000-000000000000", models.UID("uid")). - Return(nil, nil).Once() - - mock.On("DeviceRemovedCount", ctx, "00000000-0000-0000-0000-000000000000"). - Return(int64(0), errors.New("error", "", 0)).Once() - }, - expected: NewErrDeviceRemovedCount(errors.New("error", "", 0)), - }, - { - description: "fails when namespace has reached the limit counting with removed devices", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - MaxDevices: 3, - DevicesCount: 1, - Billing: &models.Billing{ - Active: false, - }, - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). - Return(nil, store.ErrNoDocuments).Once() - - mock.On("DeviceGetByName", ctx, "name", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). - Return(nil, store.ErrNoDocuments).Once() - - envMock.On("Get", "SHELLHUB_CLOUD").Return("true").Twice() - envMock.On("Get", "SHELLHUB_ENTERPRISE").Return("false").Once() - - mock.On("DeviceRemovedGet", ctx, "00000000-0000-0000-0000-000000000000", models.UID("uid")). - Return(nil, nil).Once() - - mock.On("DeviceRemovedCount", ctx, "00000000-0000-0000-0000-000000000000"). 
- Return(int64(2), nil).Once() - }, - expected: NewErrDeviceRemovedFull(3, nil), - }, - { - description: "fails when could not evaluate the namespace capabilities when accepted device is not removed", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - MaxDevices: 3, - DevicesCount: 1, - Billing: &models.Billing{ - Active: false, - }, - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). - Return(nil, store.ErrNoDocuments).Once() - - mock.On("DeviceGetByName", ctx, "name", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). - Return(nil, store.ErrNoDocuments).Once() - - envMock.On("Get", "SHELLHUB_CLOUD").Return("true").Twice() - envMock.On("Get", "SHELLHUB_ENTERPRISE").Return("false").Once() - - mock.On("DeviceRemovedGet", ctx, "00000000-0000-0000-0000-000000000000", models.UID("uid")). - Return(nil, nil).Once() - - mock.On("DeviceRemovedCount", ctx, "00000000-0000-0000-0000-000000000000"). 
- Return(int64(1), nil).Once() - - clientMock.On("BillingEvaluate", "00000000-0000-0000-0000-000000000000").Return(nil, 0, errors.New("error", "", 0)).Once() - }, - expected: NewErrBillingEvaluate(ErrEvaluate), - }, - { - description: "fails when namespace cannot accept more devices", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - MaxDevices: 3, - DevicesCount: 1, - Billing: &models.Billing{ - Active: false, - }, - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). - Return(nil, store.ErrNoDocuments).Once() - - mock.On("DeviceGetByName", ctx, "name", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). - Return(nil, store.ErrNoDocuments).Once() - - envMock.On("Get", "SHELLHUB_CLOUD").Return("true").Twice() - envMock.On("Get", "SHELLHUB_ENTERPRISE").Return("false").Once() - - mock.On("DeviceRemovedGet", ctx, "00000000-0000-0000-0000-000000000000", models.UID("uid")). - Return(nil, nil).Once() - - mock.On("DeviceRemovedCount", ctx, "00000000-0000-0000-0000-000000000000"). 
- Return(int64(1), nil).Once() - - clientMock.On("BillingEvaluate", "00000000-0000-0000-0000-000000000000").Return(&models.BillingEvaluation{ - CanAccept: false, - }, 0, nil).Once() - }, - expected: ErrDeviceLimit, - }, - { - description: "fails to update the device status when device is not on removed list", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - MaxDevices: 3, - DevicesCount: 1, - Billing: &models.Billing{ - Active: false, - }, - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). - Return(nil, store.ErrNoDocuments).Once() - - mock.On("DeviceGetByName", ctx, "name", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). - Return(nil, store.ErrNoDocuments).Once() - - envMock.On("Get", "SHELLHUB_CLOUD").Return("true").Twice() - envMock.On("Get", "SHELLHUB_ENTERPRISE").Return("false").Once() - - mock.On("DeviceRemovedGet", ctx, "00000000-0000-0000-0000-000000000000", models.UID("uid")). - Return(nil, nil).Once() - - mock.On("DeviceRemovedCount", ctx, "00000000-0000-0000-0000-000000000000"). - Return(int64(1), nil).Once() - - clientMock.On("BillingEvaluate", "00000000-0000-0000-0000-000000000000").Return(&models.BillingEvaluation{ - CanAccept: true, - }, 0, nil).Once() - - mock.On("DeviceUpdateStatus", ctx, models.UID("uid"), models.DeviceStatus("accepted")). 
- Return(errors.New("error", "", 0)).Once() - }, - expected: errors.New("error", "", 0), - }, - { - description: "success to update the device status when device is not on removed list", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - MaxDevices: 3, - DevicesCount: 1, - Billing: &models.Billing{ - Active: false, - }, - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). - Return(nil, store.ErrNoDocuments).Once() - - mock.On("DeviceGetByName", ctx, "name", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). - Return(nil, store.ErrNoDocuments).Once() - - envMock.On("Get", "SHELLHUB_CLOUD").Return("true").Twice() - envMock.On("Get", "SHELLHUB_ENTERPRISE").Return("false").Once() - - mock.On("DeviceRemovedGet", ctx, "00000000-0000-0000-0000-000000000000", models.UID("uid")). - Return(nil, nil).Once() - - mock.On("DeviceRemovedCount", ctx, "00000000-0000-0000-0000-000000000000"). - Return(int64(1), nil).Once() - - clientMock.On("BillingEvaluate", "00000000-0000-0000-0000-000000000000").Return(&models.BillingEvaluation{ - CanAccept: true, - }, 0, nil).Once() - - mock.On("DeviceUpdateStatus", ctx, models.UID("uid"), models.DeviceStatus("accepted")). 
- Return(nil).Once() - }, - expected: nil, - }, - { - description: "fail when could not remove the device from removed device list when device is on removed list", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - MaxDevices: 3, - DevicesCount: 1, - Billing: &models.Billing{ - Active: false, - }, - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). - Return(nil, store.ErrNoDocuments).Once() - - mock.On("DeviceGetByName", ctx, "name", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). - Return(nil, store.ErrNoDocuments).Once() - - envMock.On("Get", "SHELLHUB_CLOUD").Return("true").Twice() - envMock.On("Get", "SHELLHUB_ENTERPRISE").Return("false").Once() - - mock.On("DeviceRemovedGet", ctx, "00000000-0000-0000-0000-000000000000", models.UID("uid")). - Return(&models.DeviceRemoved{}, nil).Once() - - mock.On("DeviceRemovedDelete", ctx, "00000000-0000-0000-0000-000000000000", models.UID("uid")). - Return(errors.New("error", "", 0)).Once() - }, - expected: NewErrDeviceRemovedDelete(errors.New("error", "", 0)), - }, - { - description: "fail when could not evaluate the namespace when device is on removed list", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). 
- Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - MaxDevices: 3, - DevicesCount: 1, - Billing: &models.Billing{ - Active: false, - }, - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). - Return(nil, store.ErrNoDocuments).Once() - - mock.On("DeviceGetByName", ctx, "name", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). - Return(nil, store.ErrNoDocuments).Once() - - envMock.On("Get", "SHELLHUB_CLOUD").Return("true").Twice() - envMock.On("Get", "SHELLHUB_ENTERPRISE").Return("false").Once() - - mock.On("DeviceRemovedGet", ctx, "00000000-0000-0000-0000-000000000000", models.UID("uid")). - Return(&models.DeviceRemoved{}, nil).Once() - - mock.On("DeviceRemovedDelete", ctx, "00000000-0000-0000-0000-000000000000", models.UID("uid")). - Return(nil).Once() - - clientMock.On("BillingEvaluate", "00000000-0000-0000-0000-000000000000").Return(nil, 0, errors.New("error", "", 0)).Once() - }, - expected: NewErrBillingEvaluate(ErrEvaluate), - }, - { - description: "fails when namespace evaluation block device acceptance when device is on removed list", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - MaxDevices: 3, - DevicesCount: 1, - Billing: &models.Billing{ - Active: false, - }, - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). 
- Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). - Return(nil, store.ErrNoDocuments).Once() - - mock.On("DeviceGetByName", ctx, "name", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). - Return(nil, store.ErrNoDocuments).Once() - - envMock.On("Get", "SHELLHUB_CLOUD").Return("true").Twice() - envMock.On("Get", "SHELLHUB_ENTERPRISE").Return("false").Once() - - mock.On("DeviceRemovedGet", ctx, "00000000-0000-0000-0000-000000000000", models.UID("uid")). - Return(&models.DeviceRemoved{}, nil).Once() - - mock.On("DeviceRemovedDelete", ctx, "00000000-0000-0000-0000-000000000000", models.UID("uid")). - Return(nil).Once() - - clientMock.On("BillingEvaluate", "00000000-0000-0000-0000-000000000000").Return(&models.BillingEvaluation{ - CanAccept: false, - }, 0, nil).Once() - }, - expected: ErrDeviceLimit, - }, - { - description: "fails to update device status when device is on removed list", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - MaxDevices: 3, - DevicesCount: 1, - Billing: &models.Billing{ - Active: false, - }, - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). 
- Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). - Return(nil, store.ErrNoDocuments).Once() - - mock.On("DeviceGetByName", ctx, "name", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). - Return(nil, store.ErrNoDocuments).Once() - - envMock.On("Get", "SHELLHUB_CLOUD").Return("true").Twice() - envMock.On("Get", "SHELLHUB_ENTERPRISE").Return("false").Once() - - mock.On("DeviceRemovedGet", ctx, "00000000-0000-0000-0000-000000000000", models.UID("uid")). - Return(&models.DeviceRemoved{}, nil).Once() - - mock.On("DeviceRemovedDelete", ctx, "00000000-0000-0000-0000-000000000000", models.UID("uid")). - Return(nil).Once() - - clientMock.On("BillingEvaluate", "00000000-0000-0000-0000-000000000000").Return(&models.BillingEvaluation{ - CanAccept: true, - }, 0, nil).Once() - - mock.On("DeviceUpdateStatus", ctx, models.UID("uid"), models.DeviceStatus("accepted")). - Return(errors.New("error", "", 0)).Once() - }, - expected: errors.New("error", "", 0), - }, - { - description: "success to update device status when device is on removed list", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - MaxDevices: 3, - DevicesCount: 1, - Billing: &models.Billing{ - Active: false, - }, - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). 
- Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceGetByMac", ctx, "mac", "00000000-0000-0000-0000-000000000000", models.DeviceStatus("accepted")). - Return(nil, store.ErrNoDocuments).Once() - - mock.On("DeviceGetByName", ctx, "name", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). - Return(nil, store.ErrNoDocuments).Once() - - envMock.On("Get", "SHELLHUB_CLOUD").Return("true").Twice() - envMock.On("Get", "SHELLHUB_ENTERPRISE").Return("false").Once() - - mock.On("DeviceRemovedGet", ctx, "00000000-0000-0000-0000-000000000000", models.UID("uid")). - Return(&models.DeviceRemoved{}, nil).Once() - - mock.On("DeviceRemovedDelete", ctx, "00000000-0000-0000-0000-000000000000", models.UID("uid")). - Return(nil).Once() - - clientMock.On("BillingEvaluate", "00000000-0000-0000-0000-000000000000").Return(&models.BillingEvaluation{ - CanAccept: true, - }, 0, nil).Once() - - mock.On("DeviceUpdateStatus", ctx, models.UID("uid"), models.DeviceStatus("accepted")). 
- Return(nil).Once() - }, - expected: nil, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - tc.requiredMocks() - - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) - err := service.UpdateDeviceStatus(ctx, tc.tenant, tc.uid, tc.status) - assert.Equal(t, tc.expected, err) - }) - } - - mock.AssertExpectations(t) -} - -func TestSetDevicePosition(t *testing.T) { - mock := new(mocks.Store) - - ctx := context.TODO() - - locator := &mocksGeoIp.Locator{} - - cases := []struct { - description string - requiredMocks func() - uid models.UID - ip string - expected error - }{ - { - description: "fails when DeviceSetPosition return error", - requiredMocks: func() { - positionGeoIP := geoip.Position{ - Longitude: 0, - Latitude: 0, - } - positionDeviceModel := models.DevicePosition{ - Longitude: 0, - Latitude: 0, - } - - locator.On("GetPosition", net.ParseIP("127.0.0.1")). - Return(positionGeoIP, nil).Once() - mock.On("DeviceSetPosition", ctx, models.UID("uid"), positionDeviceModel). - Return(errors.New("error", "", 0)).Once() - }, - uid: models.UID("uid"), - ip: "127.0.0.1", - expected: errors.New("error", "", 0), - }, - { - description: "success", - requiredMocks: func() { - positionGeoIP := geoip.Position{ - Longitude: 0, - Latitude: 0, - } - positionDeviceModel := models.DevicePosition{ - Longitude: 0, - Latitude: 0, + requiredMocks: func(ctx context.Context) { + device := &models.Device{ + UID: "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", + Name: "existingname", + DisconnectedAt: &now, } - - locator.On("GetPosition", net.ParseIP("127.0.0.1")). - Return(positionGeoIP, nil).Once() - mock.On("DeviceSetPosition", ctx, models.UID("uid"), positionDeviceModel). - Return(nil).Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-0000-0000-000000000000"). + Return(nil). + Once() + storeMock. 
+ On("DeviceResolve", ctx, store.DeviceUIDResolver, "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", mock.AnythingOfType("store.QueryOption")). + Return(device, nil). + Once() + storeMock. + On("DeviceConflicts", ctx, &models.DeviceConflicts{Name: ""}). + Return([]string{}, false, nil). + Once() + storeMock. + On("DeviceUpdate", ctx, device). + Return(nil). + Once() }, - uid: models.UID("uid"), - ip: "127.0.0.1", expected: nil, }, } - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - tc.requiredMocks() - - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, locator) - err := service.SetDevicePosition(ctx, tc.uid, tc.ip) - assert.Equal(t, tc.expected, err) - }) - } - - mock.AssertExpectations(t) -} - -func TestDeviceHeartbeat(t *testing.T) { - mock := new(mocks.Store) - - ctx := context.TODO() - - uid := models.UID("uid") - - clockMock.On("Now").Return(now).Once() - - mock.On("DeviceSetOnline", ctx, uid, now, true).Return(nil).Once() - - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) - err := service.DeviceHeartbeat(ctx, uid) - assert.NoError(t, err) - - mock.AssertExpectations(t) -} - -func TestDeviceUpdate(t *testing.T) { - mock := new(mocks.Store) - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) - - toPointer := func(s string) *string { - return &s - } - - other := toPointer("other") - - tests := []struct { - description string - uid string - tenant string - name *string - publicKey *bool - requiredMocks func(ctx context.Context) - expected error - }{ - { - description: "fails when could not get the device by UID", - uid: "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", - tenant: "00000000-0000-0000-0000-000000000000", - name: nil, - publicKey: nil, - requiredMocks: func(ctx context.Context) { - mock.On("DeviceGetByUID", ctx, 
models.UID("d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e"), "00000000-0000-0000-0000-000000000000"). - Return(nil, errors.New("error", "", 0)).Once() - }, - expected: NewErrDeviceNotFound(models.UID("d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e"), errors.New("error", "", 0)), - }, - { - description: "success when updating the device name to same name", - uid: "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", - tenant: "00000000-0000-0000-0000-000000000000", - name: toPointer("name"), - publicKey: nil, - requiredMocks: func(ctx context.Context) { - mock.On("DeviceGetByUID", ctx, models.UID("d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", - Name: "name", - }, nil).Once() - }, - expected: nil, - }, - { - description: "fails when name does not meet the validatino requirements", - uid: "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", - tenant: "00000000-0000-0000-0000-000000000000", - name: toPointer(""), - publicKey: nil, - requiredMocks: func(ctx context.Context) { - mock.On("DeviceGetByUID", ctx, models.UID("d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e"), "00000000-0000-0000-0000-000000000000"). 
- Return(&models.Device{ - UID: "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", - Name: "name", - }, nil).Once() - }, - expected: NewErrDeviceInvalid(map[string]interface{}{"name": ""}, nil), - }, - { - description: "fails when could not get the device by name", - uid: "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", - tenant: "00000000-0000-0000-0000-000000000000", - name: toPointer("same"), - publicKey: nil, - requiredMocks: func(ctx context.Context) { - mock.On("DeviceGetByUID", ctx, models.UID("d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", - Name: "name", - }, nil).Once() - - mock.On("DeviceGetByName", ctx, "same", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). - Return(nil, errors.New("error", "", 0)).Once() - }, - expected: NewErrDeviceNotFound(models.UID("same"), fmt.Errorf("failed to get device by name: %w", errors.New("error", "", 0))), - }, - { - description: "fails when already exists a device with same name", - uid: "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", - tenant: "00000000-0000-0000-0000-000000000000", - name: toPointer("same"), - publicKey: nil, - requiredMocks: func(ctx context.Context) { - mock.On("DeviceGetByUID", ctx, models.UID("d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", - Name: "name", - }, nil).Once() - - mock.On("DeviceGetByName", ctx, "same", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). 
- Return(&models.Device{ - UID: "fb2de504e98d3ccab342b53d83395cd7fda297c71e8da550c31478bae0dbb8c5", - Name: "same", - }, nil).Once() - }, - expected: NewErrDeviceDuplicated("same", nil), - }, - { - description: "success when udpate device for a different name", - uid: "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", - tenant: "00000000-0000-0000-0000-000000000000", - name: other, - publicKey: new(bool), - requiredMocks: func(ctx context.Context) { - mock.On("DeviceGetByUID", ctx, models.UID("d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e", - Name: "name", - }, nil).Once() - - mock.On("DeviceGetByName", ctx, "other", "00000000-0000-0000-0000-000000000000", models.DeviceStatusAccepted). - Return(nil, store.ErrNoDocuments).Once() - - mock.On("DeviceUpdate", ctx, "00000000-0000-0000-0000-000000000000", models.UID("d6c6a5e97217bbe4467eae46ab004695a766c5c43f70b95efd4b6a4d32b33c6e"), other, new(bool)). 
- Return(nil).Once() - }, - expected: nil, - }, - } + service := NewService(storeMock, privateKey, publicKey, storecache.NewNullCache(), clientMock) - for _, test := range tests { + for _, test := range cases { t.Run(test.description, func(t *testing.T) { ctx := context.Background() test.requiredMocks(ctx) - err := service.UpdateDevice(ctx, test.tenant, models.UID(test.uid), test.name, test.publicKey) + err := service.UpdateDevice(ctx, test.req) assert.Equal(t, test.expected, err) }) } } - -func TestUpdateDeviceStatus_other_than_accepted(t *testing.T) { - mock := new(mocks.Store) - - ctx := context.TODO() - - cases := []struct { - description string - uid models.UID - status models.DeviceStatus - tenant string - requiredMocks func() - expected error - }{ - { - description: "fails when could not get the namespace", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(nil, errors.New("error", "", 0)).Once() - }, - expected: NewErrNamespaceNotFound("00000000-0000-0000-0000-000000000000", errors.New("error", "", 0)), - }, - { - description: "fails when could not get the devcie", - uid: models.UID("uid"), - tenant: "00000000-0000-0000-0000-000000000000", - status: "accepted", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). 
- Return(nil, errors.New("error", "", 0)).Once() - }, - expected: NewErrDeviceNotFound("uid", errors.New("error", "", 0)), - }, - { - description: "fails when device already accepted", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "accepted", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - }, - expected: NewErrDeviceStatusAccepted(nil), - }, - { - description: "fails when the intended status is pending, but store update fails", - uid: models.UID("uid"), - status: "pending", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceUpdateStatus", ctx, models.UID("uid"), models.DeviceStatus("pending")). 
- Return(errors.New("error", "", 0)).Once() - }, - expected: errors.New("error", "", 0), - }, - { - description: "success to update device status when the intended status is pending", - uid: models.UID("uid"), - status: "pending", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceUpdateStatus", ctx, models.UID("uid"), models.DeviceStatus("pending")). - Return(nil).Once() - }, - expected: nil, - }, - { - description: "fails when the intended status is rejected, but store update fails", - uid: models.UID("uid"), - status: "rejected", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceUpdateStatus", ctx, models.UID("uid"), models.DeviceStatus("rejected")). 
- Return(errors.New("error", "", 0)).Once() - }, - expected: errors.New("error", "", 0), - }, - { - description: "success to update device status when the intended status is rejected", - uid: models.UID("uid"), - status: "rejected", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - - mock.On("DeviceUpdateStatus", ctx, models.UID("uid"), models.DeviceStatus("rejected")). - Return(nil).Once() - }, - expected: nil, - }, - { - description: "fails when the device is already accepted", - uid: models.UID("uid"), - status: "accepted", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "accepted", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - }, - expected: NewErrDeviceStatusAccepted(nil), - }, - { - description: "fails when the intended status is removed", - uid: models.UID("uid"), - status: "removed", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). 
- Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - }, - expected: NewErrDeviceStatusInvalid("removed", nil), - }, - { - description: "fails when the intended status is unused", - uid: models.UID("uid"), - status: "unused", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). - Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - }, - expected: NewErrDeviceStatusInvalid("unused", nil), - }, - { - description: "fails when the intended status is unknown", - uid: models.UID("uid"), - status: "unused", - tenant: "00000000-0000-0000-0000-000000000000", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "00000000-0000-0000-0000-000000000000"). - Return(&models.Namespace{ - TenantID: "00000000-0000-0000-0000-000000000000", - }, nil).Once() - - mock.On("DeviceGetByUID", ctx, models.UID("uid"), "00000000-0000-0000-0000-000000000000"). 
- Return(&models.Device{ - UID: "uid", - Name: "name", - TenantID: "00000000-0000-0000-0000-000000000000", - Status: "pending", - Identity: &models.DeviceIdentity{MAC: "mac"}, - CreatedAt: time.Time{}, - }, nil).Once() - }, - expected: NewErrDeviceStatusInvalid("unused", nil), - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - tc.requiredMocks() - - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) - err := service.UpdateDeviceStatus(ctx, tc.tenant, tc.uid, tc.status) - assert.Equal(t, tc.expected, err) - }) - } - - mock.AssertExpectations(t) -} diff --git a/api/services/errors.go b/api/services/errors.go index be32131408c..c5024ac0e1a 100644 --- a/api/services/errors.go +++ b/api/services/errors.go @@ -1,7 +1,7 @@ package services import ( - "fmt" + stderrors "errors" "github.com/shellhub-io/shellhub/pkg/errors" "github.com/shellhub-io/shellhub/pkg/models" @@ -32,6 +32,10 @@ const ( // ErrCodeNoContentChange is the error that occurs when the store function does not change any resource. Generally used in // update methods. ErrCodeNoContentChange + // ErrCodeCreated is the error code to be used when the resource was created, but the following operations failed. + ErrCodeCreated + // ErrCodeNotImplemented is the error code to be used when the resource is not yet implemented. + ErrCodeNotImplemented ) // ErrDataNotFound structure should be used to add errors.Data to an error when the resource is not found. 
@@ -60,77 +64,90 @@ type ErrDataInvalid struct { } var ( - ErrReport = errors.New("report error", ErrLayer, ErrCodeInvalid) - ErrPaymentRequired = errors.New("payment required", ErrLayer, ErrCodePayment) - ErrEvaluate = errors.New("evaluate error", ErrLayer, ErrCodeInvalid) - ErrNoContentChange = errors.New("no content change", ErrLayer, ErrCodeNoContentChange) - ErrNotFound = errors.New("not found", ErrLayer, ErrCodeNotFound) - ErrBadRequest = errors.New("bad request", ErrLayer, ErrCodeInvalid) - ErrUnauthorized = errors.New("unauthorized", ErrLayer, ErrCodeInvalid) - ErrForbidden = errors.New("forbidden", ErrLayer, ErrCodeNotFound) - ErrUserNotFound = errors.New("user not found", ErrLayer, ErrCodeNotFound) - ErrUserInvalid = errors.New("user invalid", ErrLayer, ErrCodeInvalid) - ErrUserDuplicated = errors.New("user duplicated", ErrLayer, ErrCodeDuplicated) - ErrUserPasswordInvalid = errors.New("user password invalid", ErrLayer, ErrCodeInvalid) - ErrUserPasswordDuplicated = errors.New("user password is equal to new password", ErrLayer, ErrCodeDuplicated) - ErrUserPasswordNotMatch = errors.New("user password does not match to the current password", ErrLayer, ErrCodeInvalid) - ErrUserNotConfirmed = errors.New("user not confirmed", ErrLayer, ErrCodeForbidden) - ErrUserUpdate = errors.New("user update", ErrLayer, ErrCodeStore) - ErrNamespaceNotFound = errors.New("namespace not found", ErrLayer, ErrCodeNotFound) - ErrNamespaceInvalid = errors.New("namespace invalid", ErrLayer, ErrCodeInvalid) - ErrNamespaceList = errors.New("namespace member list", ErrLayer, ErrCodeNotFound) - ErrNamespaceDuplicated = errors.New("namespace duplicated", ErrLayer, ErrCodeDuplicated) - ErrNamespaceMemberNotFound = errors.New("member not found", ErrLayer, ErrCodeNotFound) - ErrNamespaceMemberInvalid = errors.New("member invalid", ErrLayer, ErrCodeInvalid) - ErrNamespaceMemberFillData = errors.New("member fill data", ErrLayer, ErrCodeInvalid) - ErrNamespaceMemberDuplicated = 
errors.New("member duplicated", ErrLayer, ErrCodeDuplicated) - ErrNamespaceCreateStore = errors.New("namespace create store", ErrLayer, ErrCodeStore) - ErrMaxTagReached = errors.New("tag limit reached", ErrLayer, ErrCodeLimit) - ErrDuplicateTagName = errors.New("tag duplicated", ErrLayer, ErrCodeDuplicated) - ErrTagNameNotFound = errors.New("tag not found", ErrLayer, ErrCodeNotFound) - ErrTagInvalid = errors.New("tag invalid", ErrLayer, ErrCodeInvalid) - ErrNoTags = errors.New("no tags has found", ErrLayer, ErrCodeNotFound) - ErrConflictName = errors.New("name duplicated", ErrLayer, ErrCodeDuplicated) - ErrInvalidFormat = errors.New("invalid format", ErrLayer, ErrCodeInvalid) - ErrDeviceNotFound = errors.New("device not found", ErrLayer, ErrCodeNotFound) - ErrDeviceInvalid = errors.New("device invalid", ErrLayer, ErrCodeInvalid) - ErrDeviceDuplicated = errors.New("device duplicated", ErrLayer, ErrCodeDuplicated) - ErrDeviceLookupNotFound = errors.New("device lookup not found", ErrLayer, ErrCodeNotFound) - ErrDeviceLimit = errors.New("device limit reached", ErrLayer, ErrCodePayment) - ErrDeviceStatusInvalid = errors.New("device status invalid", ErrLayer, ErrCodeInvalid) - ErrDeviceStatusAccepted = errors.New("device status accepted", ErrLayer, ErrCodeInvalid) - ErrDeviceCreate = errors.New("device create", ErrLayer, ErrCodeStore) - ErrDeviceSetOnline = errors.New("device set online", ErrLayer, ErrCodeStore) - ErrMaxDeviceCountReached = errors.New("maximum number of accepted devices reached", ErrLayer, ErrCodeLimit) - ErrDuplicatedDeviceName = errors.New("device name duplicated", ErrLayer, ErrCodeDuplicated) - ErrPublicKeyDuplicated = errors.New("public key duplicated", ErrLayer, ErrCodeDuplicated) - ErrPublicKeyNotFound = errors.New("public key not found", ErrLayer, ErrCodeNotFound) - ErrPublicKeyInvalid = errors.New("public key invalid", ErrLayer, ErrCodeInvalid) - ErrPublicKeyNoTags = errors.New("public key has no tags", ErrLayer, ErrCodeInvalid) - 
ErrPublicKeyDataInvalid = errors.New("public key data invalid", ErrLayer, ErrCodeInvalid) - ErrPublicKeyFilter = errors.New("public key cannot have more than one filter at same time", ErrLayer, ErrCodeInvalid) - ErrTokenSigned = errors.New("token signed", ErrLayer, ErrCodeInvalid) - ErrTypeAssertion = errors.New("type assertion failed", ErrLayer, ErrCodeInvalid) - ErrSessionNotFound = errors.New("session not found", ErrLayer, ErrCodeNotFound) - ErrAuthInvalid = errors.New("auth invalid", ErrLayer, ErrCodeInvalid) - ErrAuthUnathorized = errors.New("auth unauthorized", ErrLayer, ErrCodeUnauthorized) - ErrNamespaceLimitReached = errors.New("namespace limit reached", ErrLayer, ErrCodeLimit) - ErrDeviceRemovedCount = errors.New("device removed count", ErrLayer, ErrCodeNotFound) - ErrDeviceRemovedInsert = errors.New("device removed insert", ErrLayer, ErrCodeStore) - ErrDeviceRemovedFull = errors.New("device removed full", ErrLayer, ErrCodePayment) - ErrDeviceRemovedDelete = errors.New("device removed delete", ErrLayer, ErrCodeStore) - ErrDeviceRemovedGet = errors.New("device removed get", ErrLayer, ErrCodeNotFound) - ErrBillingReportNamespaceDelete = errors.New("billing report namespace delete", ErrLayer, ErrCodePayment) - ErrBillingReportDevice = errors.New("billing report device", ErrLayer, ErrCodePayment) - ErrBillingEvaluate = errors.New("billing evaluate", ErrLayer, ErrCodePayment) - ErrSameTags = errors.New("trying to update tags with the same content", ErrLayer, ErrCodeNoContentChange) + ErrReport = errors.New("report error", ErrLayer, ErrCodeInvalid) + ErrPaymentRequired = errors.New("payment required", ErrLayer, ErrCodePayment) + ErrEvaluate = errors.New("evaluate error", ErrLayer, ErrCodeInvalid) + ErrNoContentChange = errors.New("no content change", ErrLayer, ErrCodeNoContentChange) + ErrNotFound = errors.New("not found", ErrLayer, ErrCodeNotFound) + ErrBadRequest = errors.New("bad request", ErrLayer, ErrCodeInvalid) + ErrUnauthorized = 
errors.New("unauthorized", ErrLayer, ErrCodeInvalid) + ErrForbidden = errors.New("forbidden", ErrLayer, ErrCodeNotFound) + ErrUserNotFound = errors.New("user not found", ErrLayer, ErrCodeNotFound) + ErrUserInvalid = errors.New("user invalid", ErrLayer, ErrCodeInvalid) + ErrUserDuplicated = errors.New("user duplicated", ErrLayer, ErrCodeDuplicated) + ErrUserPasswordInvalid = errors.New("user password invalid", ErrLayer, ErrCodeInvalid) + ErrUserPasswordDuplicated = errors.New("user password is equal to new password", ErrLayer, ErrCodeDuplicated) + ErrUserPasswordNotMatch = errors.New("user password does not match to the current password", ErrLayer, ErrCodeInvalid) + ErrUserNotConfirmed = errors.New("user not confirmed", ErrLayer, ErrCodeForbidden) + ErrUserUpdate = errors.New("user update", ErrLayer, ErrCodeStore) + ErrNamespaceNotFound = errors.New("namespace not found", ErrLayer, ErrCodeNotFound) + ErrNamespaceInvalid = errors.New("namespace invalid", ErrLayer, ErrCodeInvalid) + ErrNamespaceList = errors.New("namespace member list", ErrLayer, ErrCodeNotFound) + ErrNamespaceDuplicated = errors.New("namespace duplicated", ErrLayer, ErrCodeDuplicated) + ErrNamespaceMemberNotFound = errors.New("member not found", ErrLayer, ErrCodeNotFound) + ErrNamespaceMemberInvalid = errors.New("member invalid", ErrLayer, ErrCodeInvalid) + ErrNamespaceMemberFillData = errors.New("member fill data", ErrLayer, ErrCodeInvalid) + ErrNamespaceMemberDuplicated = errors.New("member duplicated", ErrLayer, ErrCodeDuplicated) + ErrNamespaceCreateStore = errors.New("namespace create store", ErrLayer, ErrCodeStore) + ErrMaxTagReached = errors.New("tag limit reached", ErrLayer, ErrCodeLimit) + ErrDuplicateTagName = errors.New("tag duplicated", ErrLayer, ErrCodeDuplicated) + ErrTagNameNotFound = errors.New("tag not found", ErrLayer, ErrCodeNotFound) + ErrTagInvalid = errors.New("tag invalid", ErrLayer, ErrCodeInvalid) + ErrNoTags = errors.New("no tags has found", ErrLayer, ErrCodeNotFound) + 
ErrConflictName = errors.New("name duplicated", ErrLayer, ErrCodeDuplicated) + ErrInvalidFormat = errors.New("invalid format", ErrLayer, ErrCodeInvalid) + ErrDeviceNotFound = errors.New("device not found", ErrLayer, ErrCodeNotFound) + ErrDeviceInvalid = errors.New("device invalid", ErrLayer, ErrCodeInvalid) + ErrDeviceDuplicated = errors.New("device duplicated", ErrLayer, ErrCodeDuplicated) + ErrDeviceLimit = errors.New("device limit reached", ErrLayer, ErrCodePayment) + ErrDeviceStatusInvalid = errors.New("device status invalid", ErrLayer, ErrCodeInvalid) + ErrDeviceStatusAccepted = errors.New("device status accepted", ErrLayer, ErrCodeInvalid) + ErrDeviceCreate = errors.New("device create", ErrLayer, ErrCodeStore) + ErrDeviceSetOnline = errors.New("device set online", ErrLayer, ErrCodeStore) + ErrMaxDeviceCountReached = errors.New("maximum number of accepted devices reached", ErrLayer, ErrCodeLimit) + ErrDuplicatedDeviceName = errors.New("device name duplicated", ErrLayer, ErrCodeDuplicated) + ErrPublicKeyDuplicated = errors.New("public key duplicated", ErrLayer, ErrCodeDuplicated) + ErrPublicKeyNotFound = errors.New("public key not found", ErrLayer, ErrCodeNotFound) + ErrPublicKeyInvalid = errors.New("public key invalid", ErrLayer, ErrCodeInvalid) + ErrPublicKeyNoTags = errors.New("public key has no tags", ErrLayer, ErrCodeInvalid) + ErrPublicKeyDataInvalid = errors.New("public key data invalid", ErrLayer, ErrCodeInvalid) + ErrPublicKeyFilter = errors.New("public key cannot have more than one filter at same time", ErrLayer, ErrCodeInvalid) + ErrTokenSigned = errors.New("token signed", ErrLayer, ErrCodeInvalid) + ErrTypeAssertion = errors.New("type assertion failed", ErrLayer, ErrCodeInvalid) + ErrSessionNotFound = errors.New("session not found", ErrLayer, ErrCodeNotFound) + ErrAuthInvalid = errors.New("auth invalid", ErrLayer, ErrCodeInvalid) + ErrAuthUnathorized = errors.New("auth unauthorized", ErrLayer, ErrCodeUnauthorized) + ErrNamespaceLimitReached = 
errors.New("namespace limit reached", ErrLayer, ErrCodeLimit) + ErrNamespaceCreationIsForbidden = errors.New("namespace creation not permitted for user", ErrLayer, ErrCodeForbidden) + ErrDeviceRemovedFull = errors.New("device removed full", ErrLayer, ErrCodePayment) + ErrBillingReportNamespaceDelete = errors.New("billing report namespace delete", ErrLayer, ErrCodePayment) + ErrBillingReportDevice = errors.New("billing report device", ErrLayer, ErrCodePayment) + ErrBillingEvaluate = errors.New("billing evaluate", ErrLayer, ErrCodePayment) + ErrSameTags = errors.New("trying to update tags with the same content", ErrLayer, ErrCodeNoContentChange) + ErrAPIKeyNotFound = errors.New("APIKey not found", ErrLayer, ErrCodeNotFound) + ErrAPIKeyDuplicated = errors.New("APIKey duplicated", ErrLayer, ErrCodeDuplicated) + ErrAuthForbidden = errors.New("user is authenticated but cannot access this resource", ErrLayer, ErrCodeForbidden) + ErrRoleInvalid = errors.New("role is invalid", ErrLayer, ErrCodeForbidden) + ErrUserDelete = errors.New("user couldn't be deleted", ErrLayer, ErrCodeInvalid) + ErrSetupForbidden = errors.New("setup isn't allowed anymore", ErrLayer, ErrCodeForbidden) + ErrAuthMethodNotAllowed = errors.New("auth method not allowed", ErrLayer, ErrCodeNotImplemented) + ErrAuthDeviceNoIdentityAndHostname = errors.New("device doesn't have identity neither hostname defined", ErrLayer, ErrCodeInvalid) + ErruthDeviceNoIdentity = errors.New("device doesn't have identity defined", ErrLayer, ErrCodeInvalid) ) +func NewErrRoleInvalid() error { + return ErrRoleInvalid +} + // NewErrNotFound returns an error with the ErrDataNotFound and wrap an error. func NewErrNoContentChange(err error, next error) error { return errors.Wrap(err, next) } +func NewErrAuthMethodNotAllowed(method string) error { + return errors.Wrap(ErrAuthMethodNotAllowed, stderrors.New("method"+method+"not allowed")) +} + // NewErrNotFound returns an error with the ErrDataNotFound and wrap an error. 
func NewErrNotFound(err error, id string, next error) error { return errors.Wrap(errors.WithData(err, ErrDataNotFound{ID: id}), next) @@ -179,6 +196,20 @@ func NewErrNamespaceNotFound(id string, next error) error { return NewErrNotFound(ErrNamespaceNotFound, id, next) } +// NewErrAPIKeyNotFound returns an error when the APIKey is not found. +func NewErrAPIKeyNotFound(name string, next error) error { + return NewErrNotFound(ErrAPIKeyNotFound, name, next) +} + +func NewErrAPIKeyInvalid(name string) error { + return NewErrAuthInvalid(map[string]interface{}{"api-key": name}, nil) +} + +// NewErrAPIKeyDuplicated returns an error when the APIKey name is duplicated. +func NewErrAPIKeyDuplicated(conflicts []string) error { + return NewErrDuplicated(ErrAPIKeyDuplicated, conflicts, nil) +} + // NewErrTagInvalid returns an error when the tag is invalid. func NewErrTagInvalid(tag string, next error) error { return NewErrInvalid(ErrTagInvalid, map[string]interface{}{"name": tag}, next) @@ -341,11 +372,6 @@ func NewErrDeviceDuplicated(name string, next error) error { return NewErrDuplicated(ErrDeviceDuplicated, []string{name}, next) } -// NewErrDeviceLookupNotFound returns an error to be used when the device lookup is not found. -func NewErrDeviceLookupNotFound(namespace, name string, next error) error { - return NewErrNotFound(ErrDeviceLookupNotFound, fmt.Sprintf("device %s on namespace %s", name, namespace), next) -} - // NewErrDeviceLimit returns an error to be used when the device limit is reached. 
func NewErrDeviceLimit(limit int, next error) error {
 	return NewErrLimit(ErrDeviceLimit, limit, next)
@@ -408,26 +434,15 @@ func NewErrNamespaceLimitReached(limit int, err error) error {
 	return NewErrLimit(ErrNamespaceLimitReached, limit, err)
 }
 
-func NewErrDeviceRemovedCount(next error) error {
-	return NewErrInvalid(ErrDeviceRemovedCount, nil, next)
-}
-
-func NewErrDeviceRemovedInsert(next error) error {
-	return NewErrInvalid(ErrDeviceRemovedInsert, nil, next)
+// NewErrNamespaceCreationIsForbidden returns an error when the user has no permission to add a new namespace.
+func NewErrNamespaceCreationIsForbidden(limit int, err error) error {
+	return NewErrLimit(ErrNamespaceCreationIsForbidden, limit, err)
 }
 
 func NewErrDeviceRemovedFull(limit int, next error) error {
 	return NewErrLimit(ErrDeviceRemovedFull, limit, next)
 }
 
-func NewErrDeviceRemovedDelete(next error) error {
-	return NewErrInvalid(ErrDeviceRemovedDelete, nil, next)
-}
-
-func NewErrDeviceRemovedGet(next error) error {
-	return NewErrInvalid(ErrDeviceRemovedGet, nil, next)
-}
-
 func NewErrBillingReportNamespaceDelete(next error) error {
 	return NewErrInvalid(ErrBillingReportNamespaceDelete, nil, next)
 }
@@ -443,3 +458,23 @@ func NewErrBillingEvaluate(next error) error {
 func NewErrDeviceMaxDevicesReached(count int) error {
 	return NewErrLimit(ErrMaxDeviceCountReached, count, nil)
 }
+
+func NewErrAuthForbidden() error {
+	return NewErrForbidden(ErrAuthForbidden, nil)
+}
+
+func NewErrUserDelete(err error) error {
+	return NewErrInvalid(ErrUserDelete, nil, err)
+}
+
+func NewErrSetupForbidden(err error) error {
+	return NewErrForbidden(ErrSetupForbidden, err)
+}
+
+func NewErrAuthDeviceNoIdentityAndHostname() error {
+	return NewErrInvalid(ErrAuthDeviceNoIdentityAndHostname, map[string]interface{}{}, nil)
+}
+
+func NewErrAuthDeviceNoIdentity() error {
+	return NewErrInvalid(ErruthDeviceNoIdentity, map[string]interface{}{"identity": true}, nil)
+}
 diff --git a/api/services/init_test.go b/api/services/init_test.go
index 182dcd3d8a2..8a90fcd8159 100644 --- a/api/services/init_test.go +++ b/api/services/init_test.go @@ -12,6 +12,8 @@ import ( clockmocks "github.com/shellhub-io/shellhub/pkg/clock/mocks" "github.com/shellhub-io/shellhub/pkg/envs" env_mocks "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/shellhub-io/shellhub/pkg/hash" + hashmock "github.com/shellhub-io/shellhub/pkg/hash/mocks" ) var ( @@ -20,6 +22,7 @@ var ( clientMock *mocks.Client envMock *env_mocks.Backend clockMock *clockmocks.Clock + hashMock *hashmock.Hasher now time.Time ) @@ -31,6 +34,8 @@ func TestMain(m *testing.M) { envMock = &env_mocks.Backend{} clock.DefaultBackend = clockMock envs.DefaultBackend = envMock + hashMock = &hashmock.Hasher{} + hash.Backend = hashMock now = time.Now() code := m.Run() os.Exit(code) diff --git a/api/services/member.go b/api/services/member.go new file mode 100644 index 00000000000..1f6285c07a7 --- /dev/null +++ b/api/services/member.go @@ -0,0 +1,334 @@ +package services + +import ( + "context" + "errors" + "strings" + "time" + + "github.com/shellhub-io/shellhub/api/store" + "github.com/shellhub-io/shellhub/api/store/mongo" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" + "github.com/shellhub-io/shellhub/pkg/api/requests" + "github.com/shellhub-io/shellhub/pkg/clock" + "github.com/shellhub-io/shellhub/pkg/envs" + "github.com/shellhub-io/shellhub/pkg/models" + log "github.com/sirupsen/logrus" +) + +type MemberService interface { + // EditNamespace updates a namespace for the specified requests.NamespaceEdit#Tenant. + // It returns the namespace with the updated fields and an error, if any. + EditNamespace(ctx context.Context, req *requests.NamespaceEdit) (*models.Namespace, error) + + // AddNamespaceMember adds a member to a namespace. + // + // In cloud environments, a membership invitation is created with pending status until they accept the invite via + // an invitation email. 
If the target user does not exist, the email will redirect them to the registration page, + // and the invite can be accepted after finishing. In community and enterprise environments, the member is added + // directly to the namespace without sending an email. + // + // The role assigned to the new member must not grant more authority than the user adding them (e.g., + // an administrator cannot add a member with a higher role such as an owner). Owners cannot be created. + // + // It returns the namespace and an error, if any. + AddNamespaceMember(ctx context.Context, req *requests.NamespaceAddMember) (*models.Namespace, error) + + // UpdateNamespaceMember updates a member with the specified ID in the specified namespace. The member's role cannot + // have more authority than the user who is updating the member; owners cannot be created. + // + // It returns an error, if any. + UpdateNamespaceMember(ctx context.Context, req *requests.NamespaceUpdateMember) error + + // RemoveNamespaceMember removes a specified member from a namespace. The action must be performed by a user with higher + // authority than the target member. Owners cannot be removed. + // + // Returns the updated namespace and an error, if any. + RemoveNamespaceMember(ctx context.Context, req *requests.NamespaceRemoveMember) (*models.Namespace, error) + + // LeaveNamespace allows an authenticated user to remove themselves from a namespace. Owners cannot leave a namespace. + // If the user attempts to leave the namespace they are authenticated to, their authentication token will be invalidated. + // Returns an error, if any. 
+ LeaveNamespace(ctx context.Context, req *requests.LeaveNamespace) (*models.UserAuthResponse, error) +} + +func (s *service) AddNamespaceMember(ctx context.Context, req *requests.NamespaceAddMember) (*models.Namespace, error) { + namespace, err := s.store.NamespaceResolve(ctx, store.NamespaceTenantIDResolver, req.TenantID) + if err != nil || namespace == nil { + return nil, NewErrNamespaceNotFound(req.TenantID, err) + } + + user, err := s.store.UserResolve(ctx, store.UserIDResolver, req.UserID) + if err != nil || user == nil { + return nil, NewErrUserNotFound(req.UserID, err) + } + + // checks if the active member is in the namespace. user is the active member. + active, ok := namespace.FindMember(user.ID) + if !ok { + return nil, NewErrNamespaceMemberNotFound(user.ID, err) + } + + if !active.Role.HasAuthority(req.MemberRole) { + return nil, NewErrRoleInvalid() + } + + // In cloud instances, if the target user does not exist, we need to create a new user + // with the specified email. We use the inserted ID to identify the user once they complete + // the registration and accepts the invitation. 
+ passiveUser, err := s.store.UserResolve(ctx, store.UserEmailResolver, strings.ToLower(req.MemberEmail)) + if err != nil { + if !envs.IsCloud() || !errors.Is(err, store.ErrNoDocuments) { + return nil, NewErrUserNotFound(req.MemberEmail, err) + } + + passiveUser = &models.User{} + passiveUser.ID, err = s.store.UserInvitationsUpsert(ctx, strings.ToLower(req.MemberEmail)) + if err != nil { + return nil, err + } + } + + if _, ok := namespace.FindMember(passiveUser.ID); ok { + return nil, NewErrNamespaceMemberDuplicated(passiveUser.ID, nil) + } + + var callback store.TransactionCb + if !envs.IsCloud() { + callback = s.addMember(namespace, passiveUser.ID, req) + } else { + invitation, err := s.store.MembershipInvitationResolve(ctx, req.TenantID, passiveUser.ID) + if err != nil && !errors.Is(err, store.ErrNoDocuments) { + return nil, err + } + + switch { + case invitation == nil, !invitation.IsPending(): + callback = s.addMember(namespace, passiveUser.ID, req) + case invitation.IsExpired(): + callback = s.resendMembershipInvite(invitation, req) + default: + return nil, NewErrNamespaceMemberDuplicated(passiveUser.ID, nil) + } + } + + if err := s.store.WithTransaction(ctx, callback); err != nil { + return nil, err + } + + n, err := s.store.NamespaceResolve(ctx, store.NamespaceTenantIDResolver, req.TenantID) + if err != nil { + return nil, err + } + + return n, nil +} + +// addMember returns a transaction callback that adds a member to a namespace. 
+// +// In all environments, it creates a membership_invitation record for audit purposes: +// - Cloud: Creates pending invitation with expiration and sends email +// - Community/Enterprise: Creates accepted invitation and adds member directly to namespace +func (s *service) addMember(namespace *models.Namespace, userID string, req *requests.NamespaceAddMember) store.TransactionCb { + return func(ctx context.Context) error { + now := clock.Now() + + invitation := &models.MembershipInvitation{ + TenantID: req.TenantID, + UserID: userID, + InvitedBy: namespace.Owner, + Role: req.MemberRole, + CreatedAt: now, + UpdatedAt: now, + StatusUpdatedAt: now, + Invitations: 1, + } + + if envs.IsCloud() { + expiresAt := now.Add(7 * (24 * time.Hour)) + invitation.Status = models.MembershipInvitationStatusPending + invitation.ExpiresAt = &expiresAt + if err := s.store.MembershipInvitationCreate(ctx, invitation); err != nil { + return err + } + + if err := s.client.InviteMember(ctx, req.TenantID, userID, req.FowardedHost); err != nil { + return err + } + } else { + invitation.Status = models.MembershipInvitationStatusAccepted + invitation.ExpiresAt = nil + if err := s.store.MembershipInvitationCreate(ctx, invitation); err != nil { + return err + } + + member := &models.Member{ID: userID, AddedAt: now, Role: req.MemberRole} + if err := s.store.NamespaceCreateMembership(ctx, req.TenantID, member); err != nil { + return err + } + } + + return nil + } +} + +// resendMembershipInvite returns a transaction callback that resends a membership invitation. +// +// This function updates an existing invitation to pending status, extends the expiration date, +// increments the invitation counter, and sends a new invitation email (cloud only). 
+func (s *service) resendMembershipInvite(invitation *models.MembershipInvitation, req *requests.NamespaceAddMember) store.TransactionCb { + return func(ctx context.Context) error { + now := clock.Now() + + expiresAt := now.Add(7 * (24 * time.Hour)) + invitation.Status = models.MembershipInvitationStatusPending + invitation.Role = req.MemberRole + invitation.ExpiresAt = &expiresAt + invitation.UpdatedAt = now + invitation.StatusUpdatedAt = now + invitation.Invitations++ + + if err := s.store.MembershipInvitationUpdate(ctx, invitation); err != nil { + return err + } + + return s.client.InviteMember(ctx, req.TenantID, invitation.UserID, req.FowardedHost) + } +} + +func (s *service) UpdateNamespaceMember(ctx context.Context, req *requests.NamespaceUpdateMember) error { + namespace, err := s.store.NamespaceResolve(ctx, store.NamespaceTenantIDResolver, req.TenantID) + if err != nil { + return NewErrNamespaceNotFound(req.TenantID, err) + } + + user, err := s.store.UserResolve(ctx, store.UserIDResolver, req.UserID) + if err != nil { + return NewErrUserNotFound(req.UserID, err) + } + + active, ok := namespace.FindMember(user.ID) + if !ok { + return NewErrNamespaceMemberNotFound(user.ID, err) + } + + member, ok := namespace.FindMember(req.MemberID) + if !ok { + return NewErrNamespaceMemberNotFound(req.MemberID, err) + } + + if req.MemberRole != authorizer.RoleInvalid { + if !active.Role.HasAuthority(req.MemberRole) { + return NewErrRoleInvalid() + } + + member.Role = req.MemberRole + } + + if err := s.store.NamespaceUpdateMembership(ctx, req.TenantID, member); err != nil { + return err + } + + s.AuthUncacheToken(ctx, namespace.TenantID, req.MemberID) // nolint: errcheck + + return nil +} + +func (s *service) RemoveNamespaceMember(ctx context.Context, req *requests.NamespaceRemoveMember) (*models.Namespace, error) { + namespace, err := s.store.NamespaceResolve(ctx, store.NamespaceTenantIDResolver, req.TenantID) + if err != nil { + return nil, 
NewErrNamespaceNotFound(req.TenantID, err) + } + + user, err := s.store.UserResolve(ctx, store.UserIDResolver, req.UserID) + if err != nil { + return nil, NewErrUserNotFound(req.UserID, err) + } + + active, ok := namespace.FindMember(user.ID) + if !ok { + return nil, NewErrNamespaceMemberNotFound(user.ID, err) + } + + passive, ok := namespace.FindMember(req.MemberID) + if !ok { + return nil, NewErrNamespaceMemberNotFound(req.MemberID, err) + } + + if !active.Role.HasAuthority(passive.Role) { + return nil, NewErrRoleInvalid() + } + + if err := s.removeMember(ctx, namespace, passive); err != nil { //nolint:revive + return nil, err + } + + if err := s.AuthUncacheToken(ctx, req.TenantID, req.UserID); err != nil { + log.WithError(err). + WithField("tenant_id", req.TenantID). + WithField("user_id", req.UserID). + Error("failed to uncache the token") + } + + return s.store.NamespaceResolve(ctx, store.NamespaceTenantIDResolver, req.TenantID) +} + +func (s *service) LeaveNamespace(ctx context.Context, req *requests.LeaveNamespace) (*models.UserAuthResponse, error) { + ns, err := s.store.NamespaceResolve(ctx, store.NamespaceTenantIDResolver, req.TenantID) + if err != nil { + return nil, NewErrNamespaceNotFound(req.TenantID, err) + } + + member, ok := ns.FindMember(req.UserID) + if !ok || member.Role == authorizer.RoleOwner { + return nil, NewErrAuthForbidden() + } + + if err := s.removeMember(ctx, ns, member); err != nil { //nolint:revive + return nil, err + } + + // If the user is attempting to leave a namespace other than the authenticated one, + // there is no need to generate a new token. + if req.TenantID != req.AuthenticatedTenantID { + return nil, nil + } + + user, err := s.store.UserResolve(ctx, store.UserIDResolver, req.UserID) + if user == nil { + return nil, NewErrUserNotFound(req.UserID, err) + } + + user.Preferences.PreferredNamespace = "" + if err := s.store.UserUpdate(ctx, user); err != nil { + log.WithError(err). + WithField("tenant_id", req.TenantID). 
+ WithField("user_id", req.UserID). + Error("failed to reset user's preferred namespace") + } + + if err := s.AuthUncacheToken(ctx, req.TenantID, req.UserID); err != nil { + log.WithError(err). + WithField("tenant_id", req.TenantID). + WithField("user_id", req.UserID). + Error("failed to uncache the token") + } + + // TODO: make this method a util function + return s.CreateUserToken(ctx, &requests.CreateUserToken{UserID: req.UserID}) +} + +func (s *service) removeMember(ctx context.Context, ns *models.Namespace, member *models.Member) error { + if err := s.store.NamespaceDeleteMembership(ctx, ns.TenantID, member); err != nil { + switch { + case errors.Is(err, store.ErrNoDocuments): + return NewErrNamespaceNotFound(ns.TenantID, err) + case errors.Is(err, mongo.ErrUserNotFound): + return NewErrNamespaceMemberNotFound(member.ID, err) + default: + return err + } + } + + return nil +} diff --git a/api/services/member_test.go b/api/services/member_test.go new file mode 100644 index 00000000000..db7c8f7c9b3 --- /dev/null +++ b/api/services/member_test.go @@ -0,0 +1,1991 @@ +package services + +import ( + "context" + "errors" + "testing" + "time" + + "github.com/shellhub-io/shellhub/api/store" + storemock "github.com/shellhub-io/shellhub/api/store/mocks" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" + "github.com/shellhub-io/shellhub/pkg/api/requests" + storecache "github.com/shellhub-io/shellhub/pkg/cache" + cachemock "github.com/shellhub-io/shellhub/pkg/cache/mocks" + "github.com/shellhub-io/shellhub/pkg/clock" + clockmock "github.com/shellhub-io/shellhub/pkg/clock/mocks" + "github.com/shellhub-io/shellhub/pkg/envs" + envmock "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +func TestService_AddNamespaceMember(t *testing.T) { + type Expected struct { + namespace *models.Namespace + err error + } + + envMock := new(envmock.Backend) + 
storeMock := new(storemock.Store) + clockMock := new(clockmock.Clock) + + envs.DefaultBackend = envMock + clock.DefaultBackend = clockMock + + now := time.Now() + clockMock.On("Now").Return(now) + + cases := []struct { + description string + req *requests.NamespaceAddMember + requiredMocks func(context.Context) + expected Expected + }{ + { + description: "[community|enterprise|cloud] fails when the namespace was not found", + req: &requests.NamespaceAddMember{ + FowardedHost: "localhost", + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberEmail: "john.doe@test.com", + MemberRole: authorizer.RoleObserver, + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(nil, ErrNamespaceNotFound). + Once() + }, + expected: Expected{ + namespace: nil, + err: NewErrNamespaceNotFound("00000000-0000-4000-0000-000000000000", ErrNamespaceNotFound), + }, + }, + { + description: "[community|enterprise|cloud] fails when the active member was not found", + req: &requests.NamespaceAddMember{ + FowardedHost: "localhost", + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberEmail: "john.doe@test.com", + MemberRole: authorizer.RoleObserver, + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{}, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(nil, ErrUserNotFound). 
+ Once() + }, + expected: Expected{ + namespace: nil, + err: NewErrUserNotFound("000000000000000000000000", ErrUserNotFound), + }, + }, + { + description: "[community|enterprise|cloud] fails when the active member is not on the namespace", + req: &requests.NamespaceAddMember{ + FowardedHost: "localhost", + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberEmail: "john.doe@test.com", + MemberRole: authorizer.RoleObserver, + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{}, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(&models.User{ + ID: "000000000000000000000000", + UserData: models.UserData{Username: "jane_doe"}, + }, nil). + Once() + }, + expected: Expected{ + namespace: nil, + err: NewErrNamespaceMemberNotFound("000000000000000000000000", nil), + }, + }, + { + description: "[community|enterprise|cloud] fails when the passive role's is owner", + req: &requests.NamespaceAddMember{ + FowardedHost: "localhost", + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberEmail: "john.doe@test.com", + MemberRole: authorizer.RoleOwner, + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOperator, + }, + }, + }, nil). + Once() + storeMock. 
+ On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(&models.User{ + ID: "000000000000000000000000", + UserData: models.UserData{Username: "jane_doe"}, + }, nil). + Once() + }, + expected: Expected{ + namespace: nil, + err: NewErrRoleInvalid(), + }, + }, + { + description: "[community|enterprise|cloud] fails when the active member's role cannot act over passive member's role", + req: &requests.NamespaceAddMember{ + FowardedHost: "localhost", + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberEmail: "john.doe@test.com", + MemberRole: authorizer.RoleAdministrator, + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOperator, + }, + }, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(&models.User{ + ID: "000000000000000000000000", + UserData: models.UserData{Username: "jane_doe"}, + }, nil). + Once() + }, + expected: Expected{ + namespace: nil, + err: NewErrRoleInvalid(), + }, + }, + { + description: "[community|enterprise] fails when passive member was not found", + req: &requests.NamespaceAddMember{ + FowardedHost: "localhost", + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberEmail: "john.doe@test.com", + MemberRole: authorizer.RoleObserver, + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). 
+ Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + }, + }, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(&models.User{ + ID: "000000000000000000000000", + UserData: models.UserData{Username: "jane_doe"}, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserEmailResolver, "john.doe@test.com"). + Return(nil, errors.New("error")). + Once() + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("false"). + Once() + }, + expected: Expected{ + namespace: nil, + err: NewErrUserNotFound("john.doe@test.com", errors.New("error")), + }, + }, + { + description: "[community|enterprise|cloud] fails when the member is duplicated", + req: &requests.NamespaceAddMember{ + FowardedHost: "localhost", + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberEmail: "john.doe@test.com", + MemberRole: authorizer.RoleObserver, + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + }, + { + ID: "000000000000000000000001", + Role: authorizer.RoleAdministrator, + }, + }, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(&models.User{ + ID: "000000000000000000000000", + UserData: models.UserData{Username: "jane_doe"}, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserEmailResolver, "john.doe@test.com"). 
+ Return(&models.User{ + ID: "000000000000000000000001", + UserData: models.UserData{Username: "john_doe"}, + }, nil). + Once() + }, + expected: Expected{ + namespace: nil, + err: NewErrNamespaceMemberDuplicated("000000000000000000000001", nil), + }, + }, + { + description: "[cloud] fails when the member has pending invitation not expired", + req: &requests.NamespaceAddMember{ + FowardedHost: "localhost", + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberEmail: "john.doe@test.com", + MemberRole: authorizer.RoleObserver, + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + }, + }, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(&models.User{ + ID: "000000000000000000000000", + UserData: models.UserData{Username: "jane_doe"}, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserEmailResolver, "john.doe@test.com"). + Return(&models.User{ + ID: "000000000000000000000001", + UserData: models.UserData{Username: "john_doe"}, + }, nil). + Once() + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("true"). + Once() + storeMock. + On("MembershipInvitationResolve", ctx, "00000000-0000-4000-0000-000000000000", "000000000000000000000001"). + Return( + &models.MembershipInvitation{ + TenantID: "00000000-0000-4000-0000-000000000000", + UserID: "000000000000000000000001", + Status: models.MembershipInvitationStatusPending, + ExpiresAt: &[]time.Time{time.Now().Add(14 * (24 * time.Hour))}[0], + }, + nil, + ). 
+ Once() + }, + expected: Expected{ + namespace: nil, + err: NewErrNamespaceMemberDuplicated("000000000000000000000001", nil), + }, + }, + { + description: "[community|enterprise] fails when cannot add the member", + req: &requests.NamespaceAddMember{ + FowardedHost: "localhost", + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberEmail: "john.doe@test.com", + MemberRole: authorizer.RoleObserver, + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + }, + }, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(&models.User{ + ID: "000000000000000000000000", + UserData: models.UserData{Username: "jane_doe"}, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserEmailResolver, "john.doe@test.com"). + Return(&models.User{ + ID: "000000000000000000000001", + UserData: models.UserData{Username: "john_doe"}, + }, nil). + Once() + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("false"). + Once() + storeMock. + On("WithTransaction", ctx, mock.Anything). + Return(errors.New("error")). + Once() + }, + expected: Expected{ + namespace: nil, + err: errors.New("error"), + }, + }, + { + description: "[community|enterprise] succeeds", + req: &requests.NamespaceAddMember{ + FowardedHost: "localhost", + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberEmail: "john.doe@test.com", + MemberRole: authorizer.RoleObserver, + }, + requiredMocks: func(ctx context.Context) { + storeMock. 
+ On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + }, + }, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(&models.User{ + ID: "000000000000000000000000", + UserData: models.UserData{Username: "jane_doe"}, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserEmailResolver, "john.doe@test.com"). + Return(&models.User{ + ID: "000000000000000000000001", + UserData: models.UserData{Username: "john_doe"}, + }, nil). + Once() + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("false"). + Once() + storeMock. + On("WithTransaction", ctx, mock.Anything). + Return(nil). + Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + }, + { + ID: "000000000000000000000000", + Role: authorizer.RoleObserver, + }, + }, + }, nil). 
+ Once() + }, + expected: Expected{ + namespace: &models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + }, + { + ID: "000000000000000000000000", + Role: authorizer.RoleObserver, + }, + }, + }, + err: nil, + }, + }, + { + description: "[cloud] succeeds", + req: &requests.NamespaceAddMember{ + FowardedHost: "localhost", + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberEmail: "john.doe@test.com", + MemberRole: authorizer.RoleObserver, + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + }, + }, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(&models.User{ + ID: "000000000000000000000000", + UserData: models.UserData{Username: "jane_doe"}, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserEmailResolver, "john.doe@test.com"). + Return(&models.User{ + ID: "000000000000000000000001", + UserData: models.UserData{Username: "john_doe"}, + }, nil). + Once() + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("true"). + Once() + storeMock. + On("MembershipInvitationResolve", ctx, "00000000-0000-4000-0000-000000000000", "000000000000000000000001"). + Return(nil, store.ErrNoDocuments). + Once() + storeMock. + On("WithTransaction", ctx, mock.Anything). + Return(nil). + Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). 
+ Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + }, + { + ID: "000000000000000000000000", + Role: authorizer.RoleObserver, + }, + }, + }, nil). + Once() + }, + expected: Expected{ + namespace: &models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + }, + { + ID: "000000000000000000000000", + Role: authorizer.RoleObserver, + }, + }, + }, + err: nil, + }, + }, + { + description: "[cloud] succeeds to resend the invite", + req: &requests.NamespaceAddMember{ + FowardedHost: "localhost", + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberEmail: "john.doe@test.com", + MemberRole: authorizer.RoleObserver, + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + }, + }, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(&models.User{ + ID: "000000000000000000000000", + UserData: models.UserData{Username: "jane_doe"}, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserEmailResolver, "john.doe@test.com"). + Return(&models.User{ + ID: "000000000000000000000001", + UserData: models.UserData{Username: "john_doe"}, + }, nil). + Once() + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("true"). + Once() + storeMock. 
+ On("MembershipInvitationResolve", ctx, "00000000-0000-4000-0000-000000000000", "000000000000000000000001"). + Return(&models.MembershipInvitation{ + TenantID: "00000000-0000-4000-0000-000000000000", + UserID: "000000000000000000000001", + Status: models.MembershipInvitationStatusPending, + ExpiresAt: &[]time.Time{time.Date(2023, 0o1, 0o1, 12, 0o0, 0o0, 0o0, time.UTC)}[0], + }, nil). + Once() + storeMock. + On("WithTransaction", ctx, mock.Anything). + Return(nil). + Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + }, + }, + }, nil). + Once() + }, + expected: Expected{ + namespace: &models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + }, + }, + }, + err: nil, + }, + }, + { + description: "[cloud] succeeds to create the user when not found", + req: &requests.NamespaceAddMember{ + FowardedHost: "localhost", + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberEmail: "john.doe@test.com", + MemberRole: authorizer.RoleObserver, + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + }, + }, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). 
+ Return(&models.User{ + ID: "000000000000000000000000", + UserData: models.UserData{Username: "jane_doe"}, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserEmailResolver, "john.doe@test.com"). + Return(nil, store.ErrNoDocuments). + Once() + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("true"). + Once() + storeMock. + On("UserInvitationsUpsert", ctx, "john.doe@test.com"). + Return("000000000000000000000001", nil). + Once() + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("true"). + Once() + storeMock. + On("MembershipInvitationResolve", ctx, "00000000-0000-4000-0000-000000000000", "000000000000000000000001"). + Return(nil, store.ErrNoDocuments). + Once() + storeMock. + On("WithTransaction", ctx, mock.Anything). + Return(nil). + Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + }, + }, + }, nil). 
+ Once() + }, + expected: Expected{ + namespace: &models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + }, + }, + }, + err: nil, + }, + }, + } + + s := NewService(store.Store(storeMock), privateKey, publicKey, storecache.NewNullCache(), clientMock) + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + ctx := context.TODO() + tc.requiredMocks(ctx) + ns, err := s.AddNamespaceMember(ctx, tc.req) + assert.Equal(t, tc.expected, Expected{ns, err}) + }) + } + + storeMock.AssertExpectations(t) +} + +func TestService_addMember(t *testing.T) { + envMock = new(envmock.Backend) + clockMock := new(clockmock.Clock) + storeMock := new(storemock.Store) + + envs.DefaultBackend = envMock + clock.DefaultBackend = clockMock + + now := time.Now() + clockMock.On("Now").Return(now) + + cases := []struct { + description string + namespace *models.Namespace + memberID string + req *requests.NamespaceAddMember + requiredMocks func(context.Context) + expected error + }{ + { + description: "[community|enterprise] fails when cannot create membership invitation", + namespace: &models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", Owner: "000000000000000000000000"}, + memberID: "000000000000000000000000", + req: &requests.NamespaceAddMember{ + FowardedHost: "localhost", + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberEmail: "john.doe@test.com", + MemberRole: authorizer.RoleObserver, + }, + requiredMocks: func(ctx context.Context) { + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("false"). + Once() + storeMock. 
+ On("MembershipInvitationCreate", ctx, mock.MatchedBy(func(invitation *models.MembershipInvitation) bool { + return invitation.TenantID == "00000000-0000-4000-0000-000000000000" && + invitation.UserID == "000000000000000000000000" && + invitation.Status == models.MembershipInvitationStatusAccepted && + invitation.Role == authorizer.RoleObserver && + invitation.ExpiresAt == nil + })). + Return(errors.New("error")). + Once() + }, + expected: errors.New("error"), + }, + { + description: "[community|enterprise] fails when cannot create namespace membership", + namespace: &models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", Owner: "000000000000000000000000"}, + memberID: "000000000000000000000000", + req: &requests.NamespaceAddMember{ + FowardedHost: "localhost", + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberEmail: "john.doe@test.com", + MemberRole: authorizer.RoleObserver, + }, + requiredMocks: func(ctx context.Context) { + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("false"). + Once() + storeMock. + On("MembershipInvitationCreate", ctx, mock.MatchedBy(func(invitation *models.MembershipInvitation) bool { + return invitation.TenantID == "00000000-0000-4000-0000-000000000000" && + invitation.UserID == "000000000000000000000000" && + invitation.Status == models.MembershipInvitationStatusAccepted && + invitation.Role == authorizer.RoleObserver && + invitation.ExpiresAt == nil + })). + Return(nil). + Once() + storeMock. + On("NamespaceCreateMembership", ctx, "00000000-0000-4000-0000-000000000000", &models.Member{ID: "000000000000000000000000", Role: authorizer.RoleObserver, AddedAt: now}). + Return(errors.New("error")). 
+ Once() + }, + expected: errors.New("error"), + }, + { + description: "[community|enterprise] succeeds", + namespace: &models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", Owner: "000000000000000000000000"}, + memberID: "000000000000000000000000", + req: &requests.NamespaceAddMember{ + FowardedHost: "localhost", + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberEmail: "john.doe@test.com", + MemberRole: authorizer.RoleObserver, + }, + requiredMocks: func(ctx context.Context) { + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("false"). + Once() + storeMock. + On("MembershipInvitationCreate", ctx, mock.MatchedBy(func(invitation *models.MembershipInvitation) bool { + return invitation.TenantID == "00000000-0000-4000-0000-000000000000" && + invitation.UserID == "000000000000000000000000" && + invitation.Status == models.MembershipInvitationStatusAccepted && + invitation.Role == authorizer.RoleObserver && + invitation.ExpiresAt == nil + })). + Return(nil). + Once() + storeMock. + On("NamespaceCreateMembership", ctx, "00000000-0000-4000-0000-000000000000", &models.Member{ID: "000000000000000000000000", Role: authorizer.RoleObserver, AddedAt: now}). + Return(nil). + Once() + }, + expected: nil, + }, + { + description: "[cloud] fails when cannot create membership invitation", + namespace: &models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", Owner: "000000000000000000000000"}, + memberID: "000000000000000000000000", + req: &requests.NamespaceAddMember{ + FowardedHost: "localhost", + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberEmail: "john.doe@test.com", + MemberRole: authorizer.RoleObserver, + }, + requiredMocks: func(ctx context.Context) { + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("true"). + Once() + storeMock. 
+ On("MembershipInvitationCreate", ctx, mock.MatchedBy(func(invitation *models.MembershipInvitation) bool { + return invitation.TenantID == "00000000-0000-4000-0000-000000000000" && + invitation.UserID == "000000000000000000000000" && + invitation.Status == models.MembershipInvitationStatusPending && + invitation.Role == authorizer.RoleObserver && + invitation.ExpiresAt != nil + })). + Return(errors.New("error")). + Once() + }, + expected: errors.New("error"), + }, + { + description: "[cloud] fails cannot send the invite", + namespace: &models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", Owner: "000000000000000000000000"}, + memberID: "000000000000000000000000", + req: &requests.NamespaceAddMember{ + FowardedHost: "localhost", + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberEmail: "john.doe@test.com", + MemberRole: authorizer.RoleObserver, + }, + requiredMocks: func(ctx context.Context) { + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("true"). + Once() + storeMock. + On("MembershipInvitationCreate", ctx, mock.MatchedBy(func(invitation *models.MembershipInvitation) bool { + return invitation.TenantID == "00000000-0000-4000-0000-000000000000" && + invitation.UserID == "000000000000000000000000" && + invitation.Status == models.MembershipInvitationStatusPending && + invitation.Role == authorizer.RoleObserver && + invitation.ExpiresAt != nil + })). + Return(nil). + Once() + clientMock. + On("InviteMember", ctx, "00000000-0000-4000-0000-000000000000", "000000000000000000000000", "localhost"). + Return(errors.New("error")). 
+ Once() + }, + expected: errors.New("error"), + }, + { + description: "[cloud] succeeds", + namespace: &models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000", Owner: "000000000000000000000000"}, + memberID: "000000000000000000000000", + req: &requests.NamespaceAddMember{ + FowardedHost: "localhost", + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberEmail: "john.doe@test.com", + MemberRole: authorizer.RoleObserver, + }, + requiredMocks: func(ctx context.Context) { + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("true"). + Once() + storeMock. + On("MembershipInvitationCreate", ctx, mock.MatchedBy(func(invitation *models.MembershipInvitation) bool { + return invitation.TenantID == "00000000-0000-4000-0000-000000000000" && + invitation.UserID == "000000000000000000000000" && + invitation.Status == models.MembershipInvitationStatusPending && + invitation.Role == authorizer.RoleObserver && + invitation.ExpiresAt != nil + })). + Return(nil). + Once() + clientMock. + On("InviteMember", ctx, "00000000-0000-4000-0000-000000000000", "000000000000000000000000", "localhost"). + Return(nil). 
+ Once() + }, + expected: nil, + }, + } + + s := NewService(storeMock, privateKey, publicKey, storecache.NewNullCache(), clientMock) + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + ctx := context.Background() + tc.requiredMocks(ctx) + + cb := s.addMember(tc.namespace, tc.memberID, tc.req) + assert.Equal(tt, tc.expected, cb(ctx)) + + storeMock.AssertExpectations(tt) + envMock.AssertExpectations(tt) + }) + } +} + +func TestService_resendMembershipInvite(t *testing.T) { + envMock = new(envmock.Backend) + storeMock := new(storemock.Store) + clockMock := new(clockmock.Clock) + + envs.DefaultBackend = envMock + clock.DefaultBackend = clockMock + + now := time.Now() + clockMock.On("Now").Return(now) + + cases := []struct { + description string + invitation *models.MembershipInvitation + req *requests.NamespaceAddMember + requiredMocks func(context.Context) + expected error + }{ + { + description: "[cloud] fails when cannot update the invitation", + invitation: &models.MembershipInvitation{ + TenantID: "00000000-0000-4000-0000-000000000000", + UserID: "000000000000000000000000", + Role: authorizer.RoleAdministrator, + Status: models.MembershipInvitationStatusPending, + CreatedAt: now.Add(-7 * (24 * time.Hour)), + ExpiresAt: &[]time.Time{now.Add(-1 * (24 * time.Hour))}[0], + Invitations: 1, + }, + req: &requests.NamespaceAddMember{ + FowardedHost: "localhost", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberRole: authorizer.RoleObserver, + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("MembershipInvitationUpdate", ctx, mock.MatchedBy(func(invitation *models.MembershipInvitation) bool { + return invitation.TenantID == "00000000-0000-4000-0000-000000000000" && + invitation.UserID == "000000000000000000000000" && + invitation.Status == models.MembershipInvitationStatusPending && + invitation.Role == authorizer.RoleObserver && + invitation.ExpiresAt != nil && + invitation.Invitations == 2 + })). 
+ Return(errors.New("error")). + Once() + }, + expected: errors.New("error"), + }, + { + description: "[cloud] fails when cannot send the invite", + invitation: &models.MembershipInvitation{ + TenantID: "00000000-0000-4000-0000-000000000000", + UserID: "000000000000000000000000", + Role: authorizer.RoleAdministrator, + Status: models.MembershipInvitationStatusPending, + CreatedAt: now.Add(-7 * (24 * time.Hour)), + ExpiresAt: &[]time.Time{now.Add(-1 * (24 * time.Hour))}[0], + Invitations: 1, + }, + req: &requests.NamespaceAddMember{ + FowardedHost: "localhost", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberRole: authorizer.RoleObserver, + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("MembershipInvitationUpdate", ctx, mock.MatchedBy(func(invitation *models.MembershipInvitation) bool { + return invitation.TenantID == "00000000-0000-4000-0000-000000000000" && + invitation.UserID == "000000000000000000000000" && + invitation.Status == models.MembershipInvitationStatusPending && + invitation.Role == authorizer.RoleObserver && + invitation.ExpiresAt != nil && + invitation.Invitations == 2 + })). + Return(nil). + Once() + clientMock. + On("InviteMember", ctx, "00000000-0000-4000-0000-000000000000", "000000000000000000000000", "localhost"). + Return(errors.New("error")). + Once() + }, + expected: errors.New("error"), + }, + { + description: "[cloud] succeeds", + invitation: &models.MembershipInvitation{ + TenantID: "00000000-0000-4000-0000-000000000000", + UserID: "000000000000000000000000", + Role: authorizer.RoleAdministrator, + Status: models.MembershipInvitationStatusPending, + CreatedAt: now.Add(-7 * (24 * time.Hour)), + ExpiresAt: &[]time.Time{now.Add(-1 * (24 * time.Hour))}[0], + Invitations: 1, + }, + req: &requests.NamespaceAddMember{ + FowardedHost: "localhost", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberRole: authorizer.RoleObserver, + }, + requiredMocks: func(ctx context.Context) { + storeMock. 
+ On("MembershipInvitationUpdate", ctx, mock.MatchedBy(func(invitation *models.MembershipInvitation) bool { + return invitation.TenantID == "00000000-0000-4000-0000-000000000000" && + invitation.UserID == "000000000000000000000000" && + invitation.Status == models.MembershipInvitationStatusPending && + invitation.Role == authorizer.RoleObserver && + invitation.ExpiresAt != nil && + invitation.Invitations == 2 + })). + Return(nil). + Once() + clientMock. + On("InviteMember", ctx, "00000000-0000-4000-0000-000000000000", "000000000000000000000000", "localhost"). + Return(nil). + Once() + }, + expected: nil, + }, + } + + s := NewService(storeMock, privateKey, publicKey, storecache.NewNullCache(), clientMock) + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + ctx := context.Background() + tc.requiredMocks(ctx) + + cb := s.resendMembershipInvite(tc.invitation, tc.req) + assert.Equal(tt, tc.expected, cb(ctx)) + + storeMock.AssertExpectations(tt) + envMock.AssertExpectations(tt) + }) + } +} + +func TestService_UpdateNamespaceMember(t *testing.T) { + envMock := new(envmock.Backend) + storeMock := new(storemock.Store) + + envs.DefaultBackend = envMock + + cases := []struct { + description string + req *requests.NamespaceUpdateMember + requiredMocks func(context.Context) + expected error + }{ + { + description: "[community|enterprise|cloud] fails when the namespace was not found", + req: &requests.NamespaceUpdateMember{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberID: "000000000000000000000001", + MemberRole: authorizer.RoleObserver, + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(nil, ErrNamespaceNotFound). 
+ Once() + }, + expected: NewErrNamespaceNotFound("00000000-0000-4000-0000-000000000000", ErrNamespaceNotFound), + }, + { + description: "[community|enterprise|cloud] fails when the active member was not found", + req: &requests.NamespaceUpdateMember{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberID: "000000000000000000000001", + MemberRole: authorizer.RoleObserver, + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{}, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(nil, ErrUserNotFound). + Once() + }, + expected: NewErrUserNotFound("000000000000000000000000", ErrUserNotFound), + }, + { + description: "[community|enterprise|cloud] fails when the active member is not on the namespace", + req: &requests.NamespaceUpdateMember{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberID: "000000000000000000000001", + MemberRole: authorizer.RoleObserver, + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{}, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(&models.User{ + ID: "000000000000000000000000", + UserData: models.UserData{Username: "jane_doe"}, + }, nil). 
+ Once() + }, + expected: NewErrNamespaceMemberNotFound("000000000000000000000000", nil), + }, + { + description: "[community|enterprise] fails when the passive member is not on the namespace", + req: &requests.NamespaceUpdateMember{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberID: "000000000000000000000001", + MemberRole: authorizer.RoleObserver, + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + }, + }, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(&models.User{ + ID: "000000000000000000000000", + UserData: models.UserData{Username: "jane_doe"}, + }, nil). + Once() + }, + expected: NewErrNamespaceMemberNotFound("000000000000000000000001", nil), + }, + { + description: "[community|enterprise|cloud] fails when the passive role's is owner", + req: &requests.NamespaceUpdateMember{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberID: "000000000000000000000001", + MemberRole: authorizer.RoleOwner, + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + }, + { + ID: "000000000000000000000001", + Role: authorizer.RoleOwner, + }, + }, + }, nil). + Once() + storeMock. 
+ On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(&models.User{ + ID: "000000000000000000000000", + UserData: models.UserData{Username: "jane_doe"}, + }, nil). + Once() + }, + expected: NewErrRoleInvalid(), + }, + { + description: "[community|enterprise|cloud] fails when the active member's role cannot act over passive member's role", + req: &requests.NamespaceUpdateMember{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberID: "000000000000000000000001", + MemberRole: authorizer.RoleAdministrator, + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOperator, + }, + { + ID: "000000000000000000000001", + Role: authorizer.RoleAdministrator, + }, + }, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(&models.User{ + ID: "000000000000000000000000", + UserData: models.UserData{Username: "jane_doe"}, + }, nil). + Once() + }, + expected: NewErrRoleInvalid(), + }, + { + description: "[community|enterprise|cloud] fails when cannot update the member", + req: &requests.NamespaceUpdateMember{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberID: "000000000000000000000001", + MemberRole: authorizer.RoleAdministrator, + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). 
+ Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + }, + { + ID: "000000000000000000000001", + Role: authorizer.RoleAdministrator, + }, + }, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(&models.User{ + ID: "000000000000000000000000", + UserData: models.UserData{Username: "jane_doe"}, + }, nil). + Once() + storeMock. + On("NamespaceUpdateMembership", ctx, "00000000-0000-4000-0000-000000000000", &models.Member{ID: "000000000000000000000001", Role: authorizer.RoleAdministrator}). + Return(errors.New("error")). + Once() + }, + expected: errors.New("error"), + }, + { + description: "[community|enterprise|cloud] succeeds", + req: &requests.NamespaceUpdateMember{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberID: "000000000000000000000001", + MemberRole: authorizer.RoleAdministrator, + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + }, + { + ID: "000000000000000000000001", + Role: authorizer.RoleAdministrator, + }, + }, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(&models.User{ + ID: "000000000000000000000000", + UserData: models.UserData{Username: "jane_doe"}, + }, nil). + Once() + storeMock. 
+ On("NamespaceUpdateMembership", ctx, "00000000-0000-4000-0000-000000000000", &models.Member{ID: "000000000000000000000001", Role: authorizer.RoleAdministrator}). + Return(nil). + Once() + }, + expected: nil, + }, + } + + s := NewService(store.Store(storeMock), privateKey, publicKey, storecache.NewNullCache(), clientMock) + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + ctx := context.TODO() + tc.requiredMocks(ctx) + err := s.UpdateNamespaceMember(ctx, tc.req) + assert.Equal(t, tc.expected, err) + }) + } + + storeMock.AssertExpectations(t) +} + +func TestService_RemoveNamespaceMember(t *testing.T) { + type Expected struct { + namespace *models.Namespace + err error + } + + envMock := new(envmock.Backend) + storeMock := new(storemock.Store) + + envs.DefaultBackend = envMock + + cases := []struct { + description string + req *requests.NamespaceRemoveMember + requiredMocks func(context.Context) + expected Expected + }{ + { + description: "[community|enterprise|cloud] fails when the namespace was not found", + req: &requests.NamespaceRemoveMember{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberID: "000000000000000000000001", + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(nil, ErrNamespaceNotFound). + Once() + }, + expected: Expected{ + namespace: nil, + err: NewErrNamespaceNotFound("00000000-0000-4000-0000-000000000000", ErrNamespaceNotFound), + }, + }, + { + description: "[community|enterprise|cloud] fails when the active member was not found", + req: &requests.NamespaceRemoveMember{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberID: "000000000000000000000001", + }, + requiredMocks: func(ctx context.Context) { + storeMock. 
+ On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{}, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(nil, ErrUserNotFound). + Once() + }, + expected: Expected{ + namespace: nil, + err: NewErrUserNotFound("000000000000000000000000", ErrUserNotFound), + }, + }, + { + description: "[community|enterprise|cloud] fails when the active member is not on the namespace", + req: &requests.NamespaceRemoveMember{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberID: "000000000000000000000001", + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{}, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(&models.User{ + ID: "000000000000000000000000", + UserData: models.UserData{Username: "jane_doe"}, + }, nil). + Once() + }, + expected: Expected{ + namespace: nil, + err: NewErrNamespaceMemberNotFound("000000000000000000000000", nil), + }, + }, + { + description: "[community|enterprise] fails when the passive member is not on the namespace", + req: &requests.NamespaceRemoveMember{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberID: "000000000000000000000001", + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). 
+ Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + }, + }, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(&models.User{ + ID: "000000000000000000000000", + UserData: models.UserData{Username: "jane_doe"}, + }, nil). + Once() + }, + expected: Expected{ + namespace: nil, + err: NewErrNamespaceMemberNotFound("000000000000000000000001", nil), + }, + }, + { + description: "[community|enterprise|cloud] fails when the active member's role cannot act over passive member's role", + req: &requests.NamespaceRemoveMember{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberID: "000000000000000000000001", + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOperator, + }, + { + ID: "000000000000000000000001", + Role: authorizer.RoleAdministrator, + }, + }, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(&models.User{ + ID: "000000000000000000000000", + UserData: models.UserData{Username: "jane_doe"}, + }, nil). 
+ Once() + }, + expected: Expected{ + namespace: nil, + err: NewErrRoleInvalid(), + }, + }, + { + description: "[community|enterprise|cloud] fails when cannot remove the member", + req: &requests.NamespaceRemoveMember{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberID: "000000000000000000000001", + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + }, + { + ID: "000000000000000000000001", + Role: authorizer.RoleAdministrator, + }, + }, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(&models.User{ + ID: "000000000000000000000000", + UserData: models.UserData{Username: "jane_doe"}, + }, nil). + Once() + storeMock. + On("NamespaceDeleteMembership", ctx, "00000000-0000-4000-0000-000000000000", &models.Member{ID: "000000000000000000000001", Role: authorizer.RoleAdministrator}). + Return(errors.New("error")). + Once() + }, + expected: Expected{ + namespace: nil, + err: errors.New("error"), + }, + }, + { + description: "[community|enterprise|cloud] succeeds", + req: &requests.NamespaceRemoveMember{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + MemberID: "000000000000000000000001", + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). 
+ Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + }, + { + ID: "000000000000000000000001", + Role: authorizer.RoleAdministrator, + }, + }, + }, nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(&models.User{ + ID: "000000000000000000000000", + UserData: models.UserData{Username: "jane_doe"}, + }, nil). + Once() + storeMock. + On("NamespaceDeleteMembership", ctx, "00000000-0000-4000-0000-000000000000", &models.Member{ID: "000000000000000000000001", Role: authorizer.RoleAdministrator}). + Return(nil). + Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + }, + }, + }, nil). 
+ Once() + }, + expected: Expected{ + namespace: &models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + }, + }, + }, + err: nil, + }, + }, + } + + s := NewService(store.Store(storeMock), privateKey, publicKey, storecache.NewNullCache(), clientMock) + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + ctx := context.TODO() + tc.requiredMocks(ctx) + ns, err := s.RemoveNamespaceMember(ctx, tc.req) + assert.Equal(t, tc.expected, Expected{ns, err}) + }) + } + + storeMock.AssertExpectations(t) +} + +func TestService_LeaveNamespace(t *testing.T) { + type Expected struct { + res *models.UserAuthResponse + err error + } + + storeMock := new(storemock.Store) + cacheMock := new(cachemock.Cache) + + cases := []struct { + description string + req *requests.LeaveNamespace + requiredMocks func(context.Context) + expected Expected + }{ + { + description: "fails when the namespace was not found", + req: &requests.LeaveNamespace{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + AuthenticatedTenantID: "00000000-0000-4000-0000-000000000001", + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(nil, ErrNamespaceNotFound). + Once() + }, + expected: Expected{ + res: nil, + err: NewErrNamespaceNotFound("00000000-0000-4000-0000-000000000000", ErrNamespaceNotFound), + }, + }, + { + description: "fails when the user is not on the namespace", + req: &requests.LeaveNamespace{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + AuthenticatedTenantID: "00000000-0000-4000-0000-000000000001", + }, + requiredMocks: func(ctx context.Context) { + storeMock. 
+ On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{}, + }, nil). + Once() + }, + expected: Expected{ + res: nil, + err: NewErrAuthForbidden(), + }, + }, + { + description: "fails when the user is owner", + req: &requests.LeaveNamespace{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + AuthenticatedTenantID: "00000000-0000-4000-0000-000000000001", + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + }, + }, + }, nil). + Once() + }, + expected: Expected{ + res: nil, + err: NewErrAuthForbidden(), + }, + }, + { + description: "fails when cannot remove the member", + req: &requests.LeaveNamespace{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + AuthenticatedTenantID: "00000000-0000-4000-0000-000000000001", + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleAdministrator, + }, + }, + }, nil). + Once() + storeMock. + On("NamespaceDeleteMembership", ctx, "00000000-0000-4000-0000-000000000000", &models.Member{ID: "000000000000000000000000", Role: authorizer.RoleAdministrator}). 
+ Return(errors.New("error")). + Once() + }, + expected: Expected{ + res: nil, + err: errors.New("error"), + }, + }, + { + description: "succeeds", + req: &requests.LeaveNamespace{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + AuthenticatedTenantID: "00000000-0000-4000-0000-000000000001", + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleAdministrator, + }, + }, + }, nil). + Once() + storeMock. + On("NamespaceDeleteMembership", ctx, "00000000-0000-4000-0000-000000000000", &models.Member{ID: "000000000000000000000000", Role: authorizer.RoleAdministrator}). + Return(nil). + Once() + }, + expected: Expected{ + res: nil, + err: nil, + }, + }, + { + description: "succeeds when TenantID is equal to AuthenticatedTenantID", + req: &requests.LeaveNamespace{ + UserID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + AuthenticatedTenantID: "00000000-0000-4000-0000-000000000000", + }, + requiredMocks: func(ctx context.Context) { + user := &models.User{ + ID: "000000000000000000000000", + Status: models.UserStatusConfirmed, + Origin: models.UserOriginLocal, + LastLogin: now, + MFA: models.UserMFA{ + Enabled: false, + }, + UserData: models.UserData{ + Username: "john_doe", + Email: "john.doe@test.com", + Name: "john doe", + }, + Password: models.UserPassword{ + Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi", + }, + Preferences: models.UserPreferences{ + PreferredNamespace: "", + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + }, + } + updatedUser := &models.User{ + ID: "000000000000000000000000", + Status: 
models.UserStatusConfirmed, + Origin: models.UserOriginLocal, + LastLogin: now, + MFA: models.UserMFA{ + Enabled: false, + }, + UserData: models.UserData{ + Username: "john_doe", + Email: "john.doe@test.com", + Name: "john doe", + }, + Password: models.UserPassword{ + Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi", + }, + Preferences: models.UserPreferences{ + PreferredNamespace: "", + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + }, + } + + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(&models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleAdministrator, + }, + }, + }, nil). + Once() + storeMock. + On("NamespaceDeleteMembership", ctx, "00000000-0000-4000-0000-000000000000", &models.Member{ID: "000000000000000000000000", Role: authorizer.RoleAdministrator}). + Return(nil). + Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(user, nil). + Once() + storeMock. + On("UserUpdate", ctx, updatedUser). + Return(nil). + Once() + cacheMock. + On("Delete", ctx, "token_00000000-0000-4000-0000-000000000000000000000000000000000000"). + Return(nil). + Once() + + // NOTE: This test is a replica of TestService_CreateUserToken because this method + // internally calls it to create another token. Since this functionality is already tested, + // we are duplicating the test here to prevent failures. The important tests are all in the lines above. + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(user, nil). + Once() + storeMock. + On("NamespaceGetPreferred", ctx, "000000000000000000000000"). + Return(nil, store.ErrNoDocuments). 
+ Once() + clockMock := new(clockmock.Clock) + clock.DefaultBackend = clockMock + clockMock.On("Now").Return(now) + cacheMock. + On("Set", ctx, "token_000000000000000000000000", mock.Anything, time.Hour*72). + Return(nil). + Once() + }, + expected: Expected{ + res: &models.UserAuthResponse{ + ID: "000000000000000000000000", + Origin: models.UserOriginLocal.String(), + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + Name: "john doe", + User: "john_doe", + Email: "john.doe@test.com", + Tenant: "", + Role: "", + Token: "must ignore", + }, + err: nil, + }, + }, + } + + s := NewService(storeMock, privateKey, publicKey, cacheMock, clientMock) + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + ctx := context.TODO() + tc.requiredMocks(ctx) + + res, err := s.LeaveNamespace(ctx, tc.req) + // Since the resulting token is not crucial for the assertion and + // difficult to mock, it is safe to ignore this field. + if res != nil { + res.Token = "must ignore" + } + + assert.Equal(t, tc.expected, Expected{res, err}) + }) + } + + storeMock.AssertExpectations(t) +} diff --git a/api/services/mocks/services.go b/api/services/mocks/services.go index 576afc50d6b..e38c430cb4c 100644 --- a/api/services/mocks/services.go +++ b/api/services/mocks/services.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.38.0. DO NOT EDIT. +// Code generated by mockery v2.53.3. DO NOT EDIT. 
package mocks @@ -10,7 +10,7 @@ import ( models "github.com/shellhub-io/shellhub/pkg/models" - paginator "github.com/shellhub-io/shellhub/pkg/api/paginator" + pkgresponses "github.com/shellhub-io/shellhub/api/pkg/responses" requests "github.com/shellhub-io/shellhub/pkg/api/requests" @@ -18,7 +18,7 @@ import ( rsa "crypto/rsa" - template "text/template" + store "github.com/shellhub-io/shellhub/api/store" ) // Service is an autogenerated mock type for the Service type @@ -26,29 +26,29 @@ type Service struct { mock.Mock } -// AddNamespaceUser provides a mock function with given fields: ctx, memberUsername, memberRole, tenantID, userID -func (_m *Service) AddNamespaceUser(ctx context.Context, memberUsername string, memberRole string, tenantID string, userID string) (*models.Namespace, error) { - ret := _m.Called(ctx, memberUsername, memberRole, tenantID, userID) +// AddNamespaceMember provides a mock function with given fields: ctx, req +func (_m *Service) AddNamespaceMember(ctx context.Context, req *requests.NamespaceAddMember) (*models.Namespace, error) { + ret := _m.Called(ctx, req) if len(ret) == 0 { - panic("no return value specified for AddNamespaceUser") + panic("no return value specified for AddNamespaceMember") } var r0 *models.Namespace var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string) (*models.Namespace, error)); ok { - return rf(ctx, memberUsername, memberRole, tenantID, userID) + if rf, ok := ret.Get(0).(func(context.Context, *requests.NamespaceAddMember) (*models.Namespace, error)); ok { + return rf(ctx, req) } - if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string) *models.Namespace); ok { - r0 = rf(ctx, memberUsername, memberRole, tenantID, userID) + if rf, ok := ret.Get(0).(func(context.Context, *requests.NamespaceAddMember) *models.Namespace); ok { + r0 = rf(ctx, req) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*models.Namespace) } } - if rf, ok := 
ret.Get(1).(func(context.Context, string, string, string, string) error); ok { - r1 = rf(ctx, memberUsername, memberRole, tenantID, userID) + if rf, ok := ret.Get(1).(func(context.Context, *requests.NamespaceAddMember) error); ok { + r1 = rf(ctx, req) } else { r1 = ret.Error(1) } @@ -56,22 +56,34 @@ func (_m *Service) AddNamespaceUser(ctx context.Context, memberUsername string, return r0, r1 } -// AddPublicKeyTag provides a mock function with given fields: ctx, tenant, fingerprint, tag -func (_m *Service) AddPublicKeyTag(ctx context.Context, tenant string, fingerprint string, tag string) error { - ret := _m.Called(ctx, tenant, fingerprint, tag) +// AuthAPIKey provides a mock function with given fields: ctx, key +func (_m *Service) AuthAPIKey(ctx context.Context, key string) (*models.APIKey, error) { + ret := _m.Called(ctx, key) if len(ret) == 0 { - panic("no return value specified for AddPublicKeyTag") + panic("no return value specified for AuthAPIKey") } - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, string, string) error); ok { - r0 = rf(ctx, tenant, fingerprint, tag) + var r0 *models.APIKey + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string) (*models.APIKey, error)); ok { + return rf(ctx, key) + } + if rf, ok := ret.Get(0).(func(context.Context, string) *models.APIKey); ok { + r0 = rf(ctx, key) } else { - r0 = ret.Error(0) + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.APIKey) + } } - return r0 + if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { + r1 = rf(ctx, key) + } else { + r1 = ret.Error(1) + } + + return r0, r1 } // AuthCacheToken provides a mock function with given fields: ctx, tenant, id, token @@ -92,9 +104,9 @@ func (_m *Service) AuthCacheToken(ctx context.Context, tenant string, id string, return r0 } -// AuthDevice provides a mock function with given fields: ctx, req, remoteAddr -func (_m *Service) AuthDevice(ctx context.Context, req requests.DeviceAuth, remoteAddr string) 
(*models.DeviceAuthResponse, error) { - ret := _m.Called(ctx, req, remoteAddr) +// AuthDevice provides a mock function with given fields: ctx, req +func (_m *Service) AuthDevice(ctx context.Context, req requests.DeviceAuth) (*models.DeviceAuthResponse, error) { + ret := _m.Called(ctx, req) if len(ret) == 0 { panic("no return value specified for AuthDevice") @@ -102,49 +114,19 @@ func (_m *Service) AuthDevice(ctx context.Context, req requests.DeviceAuth, remo var r0 *models.DeviceAuthResponse var r1 error - if rf, ok := ret.Get(0).(func(context.Context, requests.DeviceAuth, string) (*models.DeviceAuthResponse, error)); ok { - return rf(ctx, req, remoteAddr) + if rf, ok := ret.Get(0).(func(context.Context, requests.DeviceAuth) (*models.DeviceAuthResponse, error)); ok { + return rf(ctx, req) } - if rf, ok := ret.Get(0).(func(context.Context, requests.DeviceAuth, string) *models.DeviceAuthResponse); ok { - r0 = rf(ctx, req, remoteAddr) + if rf, ok := ret.Get(0).(func(context.Context, requests.DeviceAuth) *models.DeviceAuthResponse); ok { + r0 = rf(ctx, req) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*models.DeviceAuthResponse) } } - if rf, ok := ret.Get(1).(func(context.Context, requests.DeviceAuth, string) error); ok { - r1 = rf(ctx, req, remoteAddr) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// AuthGetToken provides a mock function with given fields: ctx, id, mfa -func (_m *Service) AuthGetToken(ctx context.Context, id string, mfa bool) (*models.UserAuthResponse, error) { - ret := _m.Called(ctx, id, mfa) - - if len(ret) == 0 { - panic("no return value specified for AuthGetToken") - } - - var r0 *models.UserAuthResponse - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, bool) (*models.UserAuthResponse, error)); ok { - return rf(ctx, id, mfa) - } - if rf, ok := ret.Get(0).(func(context.Context, string, bool) *models.UserAuthResponse); ok { - r0 = rf(ctx, id, mfa) - } else { - if ret.Get(0) != nil { - r0 = 
ret.Get(0).(*models.UserAuthResponse) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, string, bool) error); ok { - r1 = rf(ctx, id, mfa) + if rf, ok := ret.Get(1).(func(context.Context, requests.DeviceAuth) error); ok { + r1 = rf(ctx, req) } else { r1 = ret.Error(1) } @@ -180,32 +162,48 @@ func (_m *Service) AuthIsCacheToken(ctx context.Context, tenant string, id strin return r0, r1 } -// AuthMFA provides a mock function with given fields: ctx, id -func (_m *Service) AuthMFA(ctx context.Context, id string) (bool, error) { - ret := _m.Called(ctx, id) +// AuthLocalUser provides a mock function with given fields: ctx, req, sourceIP +func (_m *Service) AuthLocalUser(ctx context.Context, req *requests.AuthLocalUser, sourceIP string) (*models.UserAuthResponse, int64, string, error) { + ret := _m.Called(ctx, req, sourceIP) if len(ret) == 0 { - panic("no return value specified for AuthMFA") + panic("no return value specified for AuthLocalUser") } - var r0 bool - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) (bool, error)); ok { - return rf(ctx, id) + var r0 *models.UserAuthResponse + var r1 int64 + var r2 string + var r3 error + if rf, ok := ret.Get(0).(func(context.Context, *requests.AuthLocalUser, string) (*models.UserAuthResponse, int64, string, error)); ok { + return rf(ctx, req, sourceIP) } - if rf, ok := ret.Get(0).(func(context.Context, string) bool); ok { - r0 = rf(ctx, id) + if rf, ok := ret.Get(0).(func(context.Context, *requests.AuthLocalUser, string) *models.UserAuthResponse); ok { + r0 = rf(ctx, req, sourceIP) } else { - r0 = ret.Get(0).(bool) + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.UserAuthResponse) + } } - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, id) + if rf, ok := ret.Get(1).(func(context.Context, *requests.AuthLocalUser, string) int64); ok { + r1 = rf(ctx, req, sourceIP) } else { - r1 = ret.Error(1) + r1 = ret.Get(1).(int64) } - return r0, r1 + if rf, ok := 
ret.Get(2).(func(context.Context, *requests.AuthLocalUser, string) string); ok { + r2 = rf(ctx, req, sourceIP) + } else { + r2 = ret.Get(2).(string) + } + + if rf, ok := ret.Get(3).(func(context.Context, *requests.AuthLocalUser, string) error); ok { + r3 = rf(ctx, req, sourceIP) + } else { + r3 = ret.Error(3) + } + + return r0, r1, r2, r3 } // AuthPublicKey provides a mock function with given fields: ctx, req @@ -238,36 +236,6 @@ func (_m *Service) AuthPublicKey(ctx context.Context, req requests.PublicKeyAuth return r0, r1 } -// AuthSwapToken provides a mock function with given fields: ctx, ID, tenant -func (_m *Service) AuthSwapToken(ctx context.Context, ID string, tenant string) (*models.UserAuthResponse, error) { - ret := _m.Called(ctx, ID, tenant) - - if len(ret) == 0 { - panic("no return value specified for AuthSwapToken") - } - - var r0 *models.UserAuthResponse - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, string) (*models.UserAuthResponse, error)); ok { - return rf(ctx, ID, tenant) - } - if rf, ok := ret.Get(0).(func(context.Context, string, string) *models.UserAuthResponse); ok { - r0 = rf(ctx, ID, tenant) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.UserAuthResponse) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok { - r1 = rf(ctx, ID, tenant) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - // AuthUncacheToken provides a mock function with given fields: ctx, tenant, id func (_m *Service) AuthUncacheToken(ctx context.Context, tenant string, id string) error { ret := _m.Called(ctx, tenant, id) @@ -286,69 +254,9 @@ func (_m *Service) AuthUncacheToken(ctx context.Context, tenant string, id strin return r0 } -// AuthUser provides a mock function with given fields: ctx, model -func (_m *Service) AuthUser(ctx context.Context, model *models.UserAuthRequest) (*models.UserAuthResponse, error) { - ret := _m.Called(ctx, model) - - if len(ret) == 0 { - panic("no return value 
specified for AuthUser") - } - - var r0 *models.UserAuthResponse - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, *models.UserAuthRequest) (*models.UserAuthResponse, error)); ok { - return rf(ctx, model) - } - if rf, ok := ret.Get(0).(func(context.Context, *models.UserAuthRequest) *models.UserAuthResponse); ok { - r0 = rf(ctx, model) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.UserAuthResponse) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, *models.UserAuthRequest) error); ok { - r1 = rf(ctx, model) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// AuthUserInfo provides a mock function with given fields: ctx, username, tenant, token -func (_m *Service) AuthUserInfo(ctx context.Context, username string, tenant string, token string) (*models.UserAuthResponse, error) { - ret := _m.Called(ctx, username, tenant, token) - - if len(ret) == 0 { - panic("no return value specified for AuthUserInfo") - } - - var r0 *models.UserAuthResponse - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, string, string) (*models.UserAuthResponse, error)); ok { - return rf(ctx, username, tenant, token) - } - if rf, ok := ret.Get(0).(func(context.Context, string, string, string) *models.UserAuthResponse); ok { - r0 = rf(ctx, username, tenant, token) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.UserAuthResponse) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, string, string, string) error); ok { - r1 = rf(ctx, username, tenant, token) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// BillingEvaluate provides a mock function with given fields: _a0, _a1 -func (_m *Service) BillingEvaluate(_a0 internalclient.Client, _a1 string) (bool, error) { - ret := _m.Called(_a0, _a1) +// BillingEvaluate provides a mock function with given fields: ctx, client, tenant +func (_m *Service) BillingEvaluate(ctx context.Context, client internalclient.Client, tenant string) (bool, error) { + ret := 
_m.Called(ctx, client, tenant) if len(ret) == 0 { panic("no return value specified for BillingEvaluate") @@ -356,17 +264,17 @@ func (_m *Service) BillingEvaluate(_a0 internalclient.Client, _a1 string) (bool, var r0 bool var r1 error - if rf, ok := ret.Get(0).(func(internalclient.Client, string) (bool, error)); ok { - return rf(_a0, _a1) + if rf, ok := ret.Get(0).(func(context.Context, internalclient.Client, string) (bool, error)); ok { + return rf(ctx, client, tenant) } - if rf, ok := ret.Get(0).(func(internalclient.Client, string) bool); ok { - r0 = rf(_a0, _a1) + if rf, ok := ret.Get(0).(func(context.Context, internalclient.Client, string) bool); ok { + r0 = rf(ctx, client, tenant) } else { r0 = ret.Get(0).(bool) } - if rf, ok := ret.Get(1).(func(internalclient.Client, string) error); ok { - r1 = rf(_a0, _a1) + if rf, ok := ret.Get(1).(func(context.Context, internalclient.Client, string) error); ok { + r1 = rf(ctx, client, tenant) } else { r1 = ret.Error(1) } @@ -374,17 +282,17 @@ func (_m *Service) BillingEvaluate(_a0 internalclient.Client, _a1 string) (bool, return r0, r1 } -// BillingReport provides a mock function with given fields: _a0, _a1, _a2 -func (_m *Service) BillingReport(_a0 internalclient.Client, _a1 string, _a2 string) error { - ret := _m.Called(_a0, _a1, _a2) +// BillingReport provides a mock function with given fields: ctx, client, tenant, action +func (_m *Service) BillingReport(ctx context.Context, client internalclient.Client, tenant string, action string) error { + ret := _m.Called(ctx, client, tenant, action) if len(ret) == 0 { panic("no return value specified for BillingReport") } var r0 error - if rf, ok := ret.Get(0).(func(internalclient.Client, string, string) error); ok { - r0 = rf(_a0, _a1, _a2) + if rf, ok := ret.Get(0).(func(context.Context, internalclient.Client, string, string) error); ok { + r0 = rf(ctx, client, tenant, action) } else { r0 = ret.Error(0) } @@ -392,27 +300,39 @@ func (_m *Service) BillingReport(_a0 
internalclient.Client, _a1 string, _a2 stri return r0 } -// CreateDeviceTag provides a mock function with given fields: ctx, uid, tag -func (_m *Service) CreateDeviceTag(ctx context.Context, uid models.UID, tag string) error { - ret := _m.Called(ctx, uid, tag) +// CreateAPIKey provides a mock function with given fields: ctx, req +func (_m *Service) CreateAPIKey(ctx context.Context, req *requests.CreateAPIKey) (*responses.CreateAPIKey, error) { + ret := _m.Called(ctx, req) if len(ret) == 0 { - panic("no return value specified for CreateDeviceTag") + panic("no return value specified for CreateAPIKey") } - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, models.UID, string) error); ok { - r0 = rf(ctx, uid, tag) + var r0 *responses.CreateAPIKey + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *requests.CreateAPIKey) (*responses.CreateAPIKey, error)); ok { + return rf(ctx, req) + } + if rf, ok := ret.Get(0).(func(context.Context, *requests.CreateAPIKey) *responses.CreateAPIKey); ok { + r0 = rf(ctx, req) } else { - r0 = ret.Error(0) + if ret.Get(0) != nil { + r0 = ret.Get(0).(*responses.CreateAPIKey) + } } - return r0 + if rf, ok := ret.Get(1).(func(context.Context, *requests.CreateAPIKey) error); ok { + r1 = rf(ctx, req) + } else { + r1 = ret.Error(1) + } + + return r0, r1 } -// CreateNamespace provides a mock function with given fields: ctx, namespace, userID -func (_m *Service) CreateNamespace(ctx context.Context, namespace requests.NamespaceCreate, userID string) (*models.Namespace, error) { - ret := _m.Called(ctx, namespace, userID) +// CreateNamespace provides a mock function with given fields: ctx, namespace +func (_m *Service) CreateNamespace(ctx context.Context, namespace *requests.NamespaceCreate) (*models.Namespace, error) { + ret := _m.Called(ctx, namespace) if len(ret) == 0 { panic("no return value specified for CreateNamespace") @@ -420,19 +340,19 @@ func (_m *Service) CreateNamespace(ctx context.Context, namespace requests.Names 
var r0 *models.Namespace var r1 error - if rf, ok := ret.Get(0).(func(context.Context, requests.NamespaceCreate, string) (*models.Namespace, error)); ok { - return rf(ctx, namespace, userID) + if rf, ok := ret.Get(0).(func(context.Context, *requests.NamespaceCreate) (*models.Namespace, error)); ok { + return rf(ctx, namespace) } - if rf, ok := ret.Get(0).(func(context.Context, requests.NamespaceCreate, string) *models.Namespace); ok { - r0 = rf(ctx, namespace, userID) + if rf, ok := ret.Get(0).(func(context.Context, *requests.NamespaceCreate) *models.Namespace); ok { + r0 = rf(ctx, namespace) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*models.Namespace) } } - if rf, ok := ret.Get(1).(func(context.Context, requests.NamespaceCreate, string) error); ok { - r1 = rf(ctx, namespace, userID) + if rf, ok := ret.Get(1).(func(context.Context, *requests.NamespaceCreate) error); ok { + r1 = rf(ctx, namespace) } else { r1 = ret.Error(1) } @@ -530,6 +450,73 @@ func (_m *Service) CreateSession(ctx context.Context, session requests.SessionCr return r0, r1 } +// CreateTag provides a mock function with given fields: ctx, req +func (_m *Service) CreateTag(ctx context.Context, req *requests.CreateTag) (string, []string, error) { + ret := _m.Called(ctx, req) + + if len(ret) == 0 { + panic("no return value specified for CreateTag") + } + + var r0 string + var r1 []string + var r2 error + if rf, ok := ret.Get(0).(func(context.Context, *requests.CreateTag) (string, []string, error)); ok { + return rf(ctx, req) + } + if rf, ok := ret.Get(0).(func(context.Context, *requests.CreateTag) string); ok { + r0 = rf(ctx, req) + } else { + r0 = ret.Get(0).(string) + } + + if rf, ok := ret.Get(1).(func(context.Context, *requests.CreateTag) []string); ok { + r1 = rf(ctx, req) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).([]string) + } + } + + if rf, ok := ret.Get(2).(func(context.Context, *requests.CreateTag) error); ok { + r2 = rf(ctx, req) + } else { + r2 = ret.Error(2) + } + + return 
r0, r1, r2 +} + +// CreateUserToken provides a mock function with given fields: ctx, req +func (_m *Service) CreateUserToken(ctx context.Context, req *requests.CreateUserToken) (*models.UserAuthResponse, error) { + ret := _m.Called(ctx, req) + + if len(ret) == 0 { + panic("no return value specified for CreateUserToken") + } + + var r0 *models.UserAuthResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *requests.CreateUserToken) (*models.UserAuthResponse, error)); ok { + return rf(ctx, req) + } + if rf, ok := ret.Get(0).(func(context.Context, *requests.CreateUserToken) *models.UserAuthResponse); ok { + r0 = rf(ctx, req) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.UserAuthResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *requests.CreateUserToken) error); ok { + r1 = rf(ctx, req) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // DeactivateSession provides a mock function with given fields: ctx, uid func (_m *Service) DeactivateSession(ctx context.Context, uid models.UID) error { ret := _m.Called(ctx, uid) @@ -548,6 +535,24 @@ func (_m *Service) DeactivateSession(ctx context.Context, uid models.UID) error return r0 } +// DeleteAPIKey provides a mock function with given fields: ctx, req +func (_m *Service) DeleteAPIKey(ctx context.Context, req *requests.DeleteAPIKey) error { + ret := _m.Called(ctx, req) + + if len(ret) == 0 { + panic("no return value specified for DeleteAPIKey") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *requests.DeleteAPIKey) error); ok { + r0 = rf(ctx, req) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // DeleteDevice provides a mock function with given fields: ctx, uid, tenant func (_m *Service) DeleteDevice(ctx context.Context, uid models.UID, tenant string) error { ret := _m.Called(ctx, uid, tenant) @@ -602,35 +607,17 @@ func (_m *Service) DeletePublicKey(ctx context.Context, fingerprint string, tena return r0 } -// DeleteTag provides a mock 
function with given fields: ctx, tenant, tag -func (_m *Service) DeleteTag(ctx context.Context, tenant string, tag string) error { - ret := _m.Called(ctx, tenant, tag) +// DeleteTag provides a mock function with given fields: ctx, req +func (_m *Service) DeleteTag(ctx context.Context, req *requests.DeleteTag) error { + ret := _m.Called(ctx, req) if len(ret) == 0 { panic("no return value specified for DeleteTag") } var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, string) error); ok { - r0 = rf(ctx, tenant, tag) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// DeviceHeartbeat provides a mock function with given fields: ctx, uid -func (_m *Service) DeviceHeartbeat(ctx context.Context, uid models.UID) error { - ret := _m.Called(ctx, uid) - - if len(ret) == 0 { - panic("no return value specified for DeviceHeartbeat") - } - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, models.UID) error); ok { - r0 = rf(ctx, uid) + if rf, ok := ret.Get(0).(func(context.Context, *requests.DeleteTag) error); ok { + r0 = rf(ctx, req) } else { r0 = ret.Error(0) } @@ -638,9 +625,9 @@ func (_m *Service) DeviceHeartbeat(ctx context.Context, uid models.UID) error { return r0 } -// EditNamespace provides a mock function with given fields: ctx, tenantID, name -func (_m *Service) EditNamespace(ctx context.Context, tenantID string, name string) (*models.Namespace, error) { - ret := _m.Called(ctx, tenantID, name) +// EditNamespace provides a mock function with given fields: ctx, req +func (_m *Service) EditNamespace(ctx context.Context, req *requests.NamespaceEdit) (*models.Namespace, error) { + ret := _m.Called(ctx, req) if len(ret) == 0 { panic("no return value specified for EditNamespace") @@ -648,19 +635,19 @@ func (_m *Service) EditNamespace(ctx context.Context, tenantID string, name stri var r0 *models.Namespace var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, string) (*models.Namespace, error)); ok { - return rf(ctx, 
tenantID, name) + if rf, ok := ret.Get(0).(func(context.Context, *requests.NamespaceEdit) (*models.Namespace, error)); ok { + return rf(ctx, req) } - if rf, ok := ret.Get(0).(func(context.Context, string, string) *models.Namespace); ok { - r0 = rf(ctx, tenantID, name) + if rf, ok := ret.Get(0).(func(context.Context, *requests.NamespaceEdit) *models.Namespace); ok { + r0 = rf(ctx, req) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*models.Namespace) } } - if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok { - r1 = rf(ctx, tenantID, name) + if rf, ok := ret.Get(1).(func(context.Context, *requests.NamespaceEdit) error); ok { + r1 = rf(ctx, req) } else { r1 = ret.Error(1) } @@ -668,24 +655,6 @@ func (_m *Service) EditNamespace(ctx context.Context, tenantID string, name stri return r0, r1 } -// EditNamespaceUser provides a mock function with given fields: ctx, tenantID, userID, memberID, memberNewRole -func (_m *Service) EditNamespaceUser(ctx context.Context, tenantID string, userID string, memberID string, memberNewRole string) error { - ret := _m.Called(ctx, tenantID, userID, memberID, memberNewRole) - - if len(ret) == 0 { - panic("no return value specified for EditNamespaceUser") - } - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string) error); ok { - r0 = rf(ctx, tenantID, userID, memberID, memberNewRole) - } else { - r0 = ret.Error(0) - } - - return r0 -} - // EditSessionRecordStatus provides a mock function with given fields: ctx, sessionRecord, tenantID func (_m *Service) EditSessionRecordStatus(ctx context.Context, sessionRecord bool, tenantID string) error { ret := _m.Called(ctx, sessionRecord, tenantID) @@ -760,6 +729,24 @@ func (_m *Service) EvaluateKeyUsername(ctx context.Context, key *models.PublicKe return r0, r1 } +// EventSession provides a mock function with given fields: ctx, uid, event +func (_m *Service) EventSession(ctx context.Context, uid models.UID, event *models.SessionEvent) 
error { + ret := _m.Called(ctx, uid, event) + + if len(ret) == 0 { + panic("no return value specified for EventSession") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, models.UID, *models.SessionEvent) error); ok { + r0 = rf(ctx, uid, event) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // GetDevice provides a mock function with given fields: ctx, uid func (_m *Service) GetDevice(ctx context.Context, uid models.UID) (*models.Device, error) { ret := _m.Called(ctx, uid) @@ -790,45 +777,15 @@ func (_m *Service) GetDevice(ctx context.Context, uid models.UID) (*models.Devic return r0, r1 } -// GetDeviceByPublicURLAddress provides a mock function with given fields: ctx, address -func (_m *Service) GetDeviceByPublicURLAddress(ctx context.Context, address string) (*models.Device, error) { - ret := _m.Called(ctx, address) +// GetNamespace provides a mock function with given fields: ctx, tenantID +func (_m *Service) GetNamespace(ctx context.Context, tenantID string) (*models.Namespace, error) { + ret := _m.Called(ctx, tenantID) if len(ret) == 0 { - panic("no return value specified for GetDeviceByPublicURLAddress") + panic("no return value specified for GetNamespace") } - var r0 *models.Device - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) (*models.Device, error)); ok { - return rf(ctx, address) - } - if rf, ok := ret.Get(0).(func(context.Context, string) *models.Device); ok { - r0 = rf(ctx, address) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.Device) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, address) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// GetNamespace provides a mock function with given fields: ctx, tenantID -func (_m *Service) GetNamespace(ctx context.Context, tenantID string) (*models.Namespace, error) { - ret := _m.Called(ctx, tenantID) - - if len(ret) == 0 { - panic("no return value specified for GetNamespace") - } - - var 
r0 *models.Namespace + var r0 *models.Namespace var r1 error if rf, ok := ret.Get(0).(func(context.Context, string) (*models.Namespace, error)); ok { return rf(ctx, tenantID) @@ -938,9 +895,9 @@ func (_m *Service) GetSessionRecord(ctx context.Context, tenantID string) (bool, return r0, r1 } -// GetStats provides a mock function with given fields: ctx -func (_m *Service) GetStats(ctx context.Context) (*models.Stats, error) { - ret := _m.Called(ctx) +// GetStats provides a mock function with given fields: ctx, req +func (_m *Service) GetStats(ctx context.Context, req *requests.GetStats) (*models.Stats, error) { + ret := _m.Called(ctx, req) if len(ret) == 0 { panic("no return value specified for GetStats") @@ -948,19 +905,19 @@ func (_m *Service) GetStats(ctx context.Context) (*models.Stats, error) { var r0 *models.Stats var r1 error - if rf, ok := ret.Get(0).(func(context.Context) (*models.Stats, error)); ok { - return rf(ctx) + if rf, ok := ret.Get(0).(func(context.Context, *requests.GetStats) (*models.Stats, error)); ok { + return rf(ctx, req) } - if rf, ok := ret.Get(0).(func(context.Context) *models.Stats); ok { - r0 = rf(ctx) + if rf, ok := ret.Get(0).(func(context.Context, *requests.GetStats) *models.Stats); ok { + r0 = rf(ctx, req) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*models.Stats) } } - if rf, ok := ret.Get(1).(func(context.Context) error); ok { - r1 = rf(ctx) + if rf, ok := ret.Get(1).(func(context.Context, *requests.GetStats) error); ok { + r1 = rf(ctx, req) } else { r1 = ret.Error(1) } @@ -968,41 +925,62 @@ func (_m *Service) GetStats(ctx context.Context) (*models.Stats, error) { return r0, r1 } -// GetTags provides a mock function with given fields: ctx, tenant -func (_m *Service) GetTags(ctx context.Context, tenant string) ([]string, int, error) { - ret := _m.Called(ctx, tenant) +// GetSystemInfo provides a mock function with given fields: ctx, req +func (_m *Service) GetSystemInfo(ctx context.Context, req *requests.GetSystemInfo) 
(*pkgresponses.SystemInfo, error) { + ret := _m.Called(ctx, req) if len(ret) == 0 { - panic("no return value specified for GetTags") + panic("no return value specified for GetSystemInfo") } - var r0 []string - var r1 int - var r2 error - if rf, ok := ret.Get(0).(func(context.Context, string) ([]string, int, error)); ok { - return rf(ctx, tenant) + var r0 *pkgresponses.SystemInfo + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *requests.GetSystemInfo) (*pkgresponses.SystemInfo, error)); ok { + return rf(ctx, req) } - if rf, ok := ret.Get(0).(func(context.Context, string) []string); ok { - r0 = rf(ctx, tenant) + if rf, ok := ret.Get(0).(func(context.Context, *requests.GetSystemInfo) *pkgresponses.SystemInfo); ok { + r0 = rf(ctx, req) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).([]string) + r0 = ret.Get(0).(*pkgresponses.SystemInfo) } } - if rf, ok := ret.Get(1).(func(context.Context, string) int); ok { - r1 = rf(ctx, tenant) + if rf, ok := ret.Get(1).(func(context.Context, *requests.GetSystemInfo) error); ok { + r1 = rf(ctx, req) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetUserRole provides a mock function with given fields: ctx, tenantID, userID +func (_m *Service) GetUserRole(ctx context.Context, tenantID string, userID string) (string, error) { + ret := _m.Called(ctx, tenantID, userID) + + if len(ret) == 0 { + panic("no return value specified for GetUserRole") + } + + var r0 string + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string, string) (string, error)); ok { + return rf(ctx, tenantID, userID) + } + if rf, ok := ret.Get(0).(func(context.Context, string, string) string); ok { + r0 = rf(ctx, tenantID, userID) } else { - r1 = ret.Get(1).(int) + r0 = ret.Get(0).(string) } - if rf, ok := ret.Get(2).(func(context.Context, string) error); ok { - r2 = rf(ctx, tenant) + if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok { + r1 = rf(ctx, tenantID, userID) } else { - r2 = ret.Error(2) + 
r1 = ret.Error(1) } - return r0, r1, r2 + return r0, r1 } // KeepAliveSession provides a mock function with given fields: ctx, uid @@ -1023,9 +1001,76 @@ func (_m *Service) KeepAliveSession(ctx context.Context, uid models.UID) error { return r0 } -// ListDevices provides a mock function with given fields: ctx, tenant, pagination, filter, status, sort, order -func (_m *Service) ListDevices(ctx context.Context, tenant string, pagination paginator.Query, filter []models.Filter, status models.DeviceStatus, sort string, order string) ([]models.Device, int, error) { - ret := _m.Called(ctx, tenant, pagination, filter, status, sort, order) +// LeaveNamespace provides a mock function with given fields: ctx, req +func (_m *Service) LeaveNamespace(ctx context.Context, req *requests.LeaveNamespace) (*models.UserAuthResponse, error) { + ret := _m.Called(ctx, req) + + if len(ret) == 0 { + panic("no return value specified for LeaveNamespace") + } + + var r0 *models.UserAuthResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *requests.LeaveNamespace) (*models.UserAuthResponse, error)); ok { + return rf(ctx, req) + } + if rf, ok := ret.Get(0).(func(context.Context, *requests.LeaveNamespace) *models.UserAuthResponse); ok { + r0 = rf(ctx, req) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.UserAuthResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *requests.LeaveNamespace) error); ok { + r1 = rf(ctx, req) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ListAPIKeys provides a mock function with given fields: ctx, req +func (_m *Service) ListAPIKeys(ctx context.Context, req *requests.ListAPIKey) ([]models.APIKey, int, error) { + ret := _m.Called(ctx, req) + + if len(ret) == 0 { + panic("no return value specified for ListAPIKeys") + } + + var r0 []models.APIKey + var r1 int + var r2 error + if rf, ok := ret.Get(0).(func(context.Context, *requests.ListAPIKey) ([]models.APIKey, int, error)); ok { + return rf(ctx, req) + 
} + if rf, ok := ret.Get(0).(func(context.Context, *requests.ListAPIKey) []models.APIKey); ok { + r0 = rf(ctx, req) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.APIKey) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *requests.ListAPIKey) int); ok { + r1 = rf(ctx, req) + } else { + r1 = ret.Get(1).(int) + } + + if rf, ok := ret.Get(2).(func(context.Context, *requests.ListAPIKey) error); ok { + r2 = rf(ctx, req) + } else { + r2 = ret.Error(2) + } + + return r0, r1, r2 +} + +// ListDevices provides a mock function with given fields: ctx, req +func (_m *Service) ListDevices(ctx context.Context, req *requests.DeviceList) ([]models.Device, int, error) { + ret := _m.Called(ctx, req) if len(ret) == 0 { panic("no return value specified for ListDevices") @@ -1034,25 +1079,25 @@ func (_m *Service) ListDevices(ctx context.Context, tenant string, pagination pa var r0 []models.Device var r1 int var r2 error - if rf, ok := ret.Get(0).(func(context.Context, string, paginator.Query, []models.Filter, models.DeviceStatus, string, string) ([]models.Device, int, error)); ok { - return rf(ctx, tenant, pagination, filter, status, sort, order) + if rf, ok := ret.Get(0).(func(context.Context, *requests.DeviceList) ([]models.Device, int, error)); ok { + return rf(ctx, req) } - if rf, ok := ret.Get(0).(func(context.Context, string, paginator.Query, []models.Filter, models.DeviceStatus, string, string) []models.Device); ok { - r0 = rf(ctx, tenant, pagination, filter, status, sort, order) + if rf, ok := ret.Get(0).(func(context.Context, *requests.DeviceList) []models.Device); ok { + r0 = rf(ctx, req) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]models.Device) } } - if rf, ok := ret.Get(1).(func(context.Context, string, paginator.Query, []models.Filter, models.DeviceStatus, string, string) int); ok { - r1 = rf(ctx, tenant, pagination, filter, status, sort, order) + if rf, ok := ret.Get(1).(func(context.Context, *requests.DeviceList) int); ok { + r1 = rf(ctx, 
req) } else { r1 = ret.Get(1).(int) } - if rf, ok := ret.Get(2).(func(context.Context, string, paginator.Query, []models.Filter, models.DeviceStatus, string, string) error); ok { - r2 = rf(ctx, tenant, pagination, filter, status, sort, order) + if rf, ok := ret.Get(2).(func(context.Context, *requests.DeviceList) error); ok { + r2 = rf(ctx, req) } else { r2 = ret.Error(2) } @@ -1060,9 +1105,9 @@ func (_m *Service) ListDevices(ctx context.Context, tenant string, pagination pa return r0, r1, r2 } -// ListNamespaces provides a mock function with given fields: ctx, pagination, filter, export -func (_m *Service) ListNamespaces(ctx context.Context, pagination paginator.Query, filter []models.Filter, export bool) ([]models.Namespace, int, error) { - ret := _m.Called(ctx, pagination, filter, export) +// ListNamespaces provides a mock function with given fields: ctx, req +func (_m *Service) ListNamespaces(ctx context.Context, req *requests.NamespaceList) ([]models.Namespace, int, error) { + ret := _m.Called(ctx, req) if len(ret) == 0 { panic("no return value specified for ListNamespaces") @@ -1071,25 +1116,25 @@ func (_m *Service) ListNamespaces(ctx context.Context, pagination paginator.Quer var r0 []models.Namespace var r1 int var r2 error - if rf, ok := ret.Get(0).(func(context.Context, paginator.Query, []models.Filter, bool) ([]models.Namespace, int, error)); ok { - return rf(ctx, pagination, filter, export) + if rf, ok := ret.Get(0).(func(context.Context, *requests.NamespaceList) ([]models.Namespace, int, error)); ok { + return rf(ctx, req) } - if rf, ok := ret.Get(0).(func(context.Context, paginator.Query, []models.Filter, bool) []models.Namespace); ok { - r0 = rf(ctx, pagination, filter, export) + if rf, ok := ret.Get(0).(func(context.Context, *requests.NamespaceList) []models.Namespace); ok { + r0 = rf(ctx, req) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]models.Namespace) } } - if rf, ok := ret.Get(1).(func(context.Context, paginator.Query, []models.Filter, 
bool) int); ok { - r1 = rf(ctx, pagination, filter, export) + if rf, ok := ret.Get(1).(func(context.Context, *requests.NamespaceList) int); ok { + r1 = rf(ctx, req) } else { r1 = ret.Get(1).(int) } - if rf, ok := ret.Get(2).(func(context.Context, paginator.Query, []models.Filter, bool) error); ok { - r2 = rf(ctx, pagination, filter, export) + if rf, ok := ret.Get(2).(func(context.Context, *requests.NamespaceList) error); ok { + r2 = rf(ctx, req) } else { r2 = ret.Error(2) } @@ -1097,9 +1142,9 @@ func (_m *Service) ListNamespaces(ctx context.Context, pagination paginator.Quer return r0, r1, r2 } -// ListPublicKeys provides a mock function with given fields: ctx, pagination -func (_m *Service) ListPublicKeys(ctx context.Context, pagination paginator.Query) ([]models.PublicKey, int, error) { - ret := _m.Called(ctx, pagination) +// ListPublicKeys provides a mock function with given fields: ctx, req +func (_m *Service) ListPublicKeys(ctx context.Context, req *requests.ListPublicKeys) ([]models.PublicKey, int, error) { + ret := _m.Called(ctx, req) if len(ret) == 0 { panic("no return value specified for ListPublicKeys") @@ -1108,25 +1153,25 @@ func (_m *Service) ListPublicKeys(ctx context.Context, pagination paginator.Quer var r0 []models.PublicKey var r1 int var r2 error - if rf, ok := ret.Get(0).(func(context.Context, paginator.Query) ([]models.PublicKey, int, error)); ok { - return rf(ctx, pagination) + if rf, ok := ret.Get(0).(func(context.Context, *requests.ListPublicKeys) ([]models.PublicKey, int, error)); ok { + return rf(ctx, req) } - if rf, ok := ret.Get(0).(func(context.Context, paginator.Query) []models.PublicKey); ok { - r0 = rf(ctx, pagination) + if rf, ok := ret.Get(0).(func(context.Context, *requests.ListPublicKeys) []models.PublicKey); ok { + r0 = rf(ctx, req) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]models.PublicKey) } } - if rf, ok := ret.Get(1).(func(context.Context, paginator.Query) int); ok { - r1 = rf(ctx, pagination) + if rf, ok := 
ret.Get(1).(func(context.Context, *requests.ListPublicKeys) int); ok { + r1 = rf(ctx, req) } else { r1 = ret.Get(1).(int) } - if rf, ok := ret.Get(2).(func(context.Context, paginator.Query) error); ok { - r2 = rf(ctx, pagination) + if rf, ok := ret.Get(2).(func(context.Context, *requests.ListPublicKeys) error); ok { + r2 = rf(ctx, req) } else { r2 = ret.Error(2) } @@ -1134,9 +1179,9 @@ func (_m *Service) ListPublicKeys(ctx context.Context, pagination paginator.Quer return r0, r1, r2 } -// ListSessions provides a mock function with given fields: ctx, pagination -func (_m *Service) ListSessions(ctx context.Context, pagination paginator.Query) ([]models.Session, int, error) { - ret := _m.Called(ctx, pagination) +// ListSessions provides a mock function with given fields: ctx, req +func (_m *Service) ListSessions(ctx context.Context, req *requests.ListSessions) ([]models.Session, int, error) { + ret := _m.Called(ctx, req) if len(ret) == 0 { panic("no return value specified for ListSessions") @@ -1145,25 +1190,62 @@ func (_m *Service) ListSessions(ctx context.Context, pagination paginator.Query) var r0 []models.Session var r1 int var r2 error - if rf, ok := ret.Get(0).(func(context.Context, paginator.Query) ([]models.Session, int, error)); ok { - return rf(ctx, pagination) + if rf, ok := ret.Get(0).(func(context.Context, *requests.ListSessions) ([]models.Session, int, error)); ok { + return rf(ctx, req) } - if rf, ok := ret.Get(0).(func(context.Context, paginator.Query) []models.Session); ok { - r0 = rf(ctx, pagination) + if rf, ok := ret.Get(0).(func(context.Context, *requests.ListSessions) []models.Session); ok { + r0 = rf(ctx, req) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]models.Session) } } - if rf, ok := ret.Get(1).(func(context.Context, paginator.Query) int); ok { - r1 = rf(ctx, pagination) + if rf, ok := ret.Get(1).(func(context.Context, *requests.ListSessions) int); ok { + r1 = rf(ctx, req) + } else { + r1 = ret.Get(1).(int) + } + + if rf, ok := 
ret.Get(2).(func(context.Context, *requests.ListSessions) error); ok { + r2 = rf(ctx, req) + } else { + r2 = ret.Error(2) + } + + return r0, r1, r2 +} + +// ListTags provides a mock function with given fields: ctx, req +func (_m *Service) ListTags(ctx context.Context, req *requests.ListTags) ([]models.Tag, int, error) { + ret := _m.Called(ctx, req) + + if len(ret) == 0 { + panic("no return value specified for ListTags") + } + + var r0 []models.Tag + var r1 int + var r2 error + if rf, ok := ret.Get(0).(func(context.Context, *requests.ListTags) ([]models.Tag, int, error)); ok { + return rf(ctx, req) + } + if rf, ok := ret.Get(0).(func(context.Context, *requests.ListTags) []models.Tag); ok { + r0 = rf(ctx, req) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.Tag) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *requests.ListTags) int); ok { + r1 = rf(ctx, req) } else { r1 = ret.Get(1).(int) } - if rf, ok := ret.Get(2).(func(context.Context, paginator.Query) error); ok { - r2 = rf(ctx, pagination) + if rf, ok := ret.Get(2).(func(context.Context, *requests.ListTags) error); ok { + r2 = rf(ctx, req) } else { r2 = ret.Error(2) } @@ -1201,17 +1283,17 @@ func (_m *Service) LookupDevice(ctx context.Context, namespace string, name stri return r0, r1 } -// OffineDevice provides a mock function with given fields: ctx, uid, online -func (_m *Service) OffineDevice(ctx context.Context, uid models.UID, online bool) error { - ret := _m.Called(ctx, uid, online) +// OfflineDevice provides a mock function with given fields: ctx, uid +func (_m *Service) OfflineDevice(ctx context.Context, uid models.UID) error { + ret := _m.Called(ctx, uid) if len(ret) == 0 { - panic("no return value specified for OffineDevice") + panic("no return value specified for OfflineDevice") } var r0 error - if rf, ok := ret.Get(0).(func(context.Context, models.UID, bool) error); ok { - r0 = rf(ctx, uid, online) + if rf, ok := ret.Get(0).(func(context.Context, models.UID) error); ok { + 
r0 = rf(ctx, uid) } else { r0 = ret.Error(0) } @@ -1219,7 +1301,7 @@ func (_m *Service) OffineDevice(ctx context.Context, uid models.UID, online bool return r0 } -// PublicKey provides a mock function with given fields: +// PublicKey provides a mock function with no fields func (_m *Service) PublicKey() *rsa.PublicKey { ret := _m.Called() @@ -1239,17 +1321,35 @@ func (_m *Service) PublicKey() *rsa.PublicKey { return r0 } -// RemoveDeviceTag provides a mock function with given fields: ctx, uid, tag -func (_m *Service) RemoveDeviceTag(ctx context.Context, uid models.UID, tag string) error { - ret := _m.Called(ctx, uid, tag) +// PullTagFrom provides a mock function with given fields: ctx, target, req +func (_m *Service) PullTagFrom(ctx context.Context, target store.TagTarget, req *requests.PullTag) error { + ret := _m.Called(ctx, target, req) if len(ret) == 0 { - panic("no return value specified for RemoveDeviceTag") + panic("no return value specified for PullTagFrom") } var r0 error - if rf, ok := ret.Get(0).(func(context.Context, models.UID, string) error); ok { - r0 = rf(ctx, uid, tag) + if rf, ok := ret.Get(0).(func(context.Context, store.TagTarget, *requests.PullTag) error); ok { + r0 = rf(ctx, target, req) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// PushTagTo provides a mock function with given fields: ctx, target, req +func (_m *Service) PushTagTo(ctx context.Context, target store.TagTarget, req *requests.PushTag) error { + ret := _m.Called(ctx, target, req) + + if len(ret) == 0 { + panic("no return value specified for PushTagTo") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, store.TagTarget, *requests.PushTag) error); ok { + r0 = rf(ctx, target, req) } else { r0 = ret.Error(0) } @@ -1257,29 +1357,29 @@ func (_m *Service) RemoveDeviceTag(ctx context.Context, uid models.UID, tag stri return r0 } -// RemoveNamespaceUser provides a mock function with given fields: ctx, tenantID, memberID, userID -func (_m *Service) 
RemoveNamespaceUser(ctx context.Context, tenantID string, memberID string, userID string) (*models.Namespace, error) { - ret := _m.Called(ctx, tenantID, memberID, userID) +// RemoveNamespaceMember provides a mock function with given fields: ctx, req +func (_m *Service) RemoveNamespaceMember(ctx context.Context, req *requests.NamespaceRemoveMember) (*models.Namespace, error) { + ret := _m.Called(ctx, req) if len(ret) == 0 { - panic("no return value specified for RemoveNamespaceUser") + panic("no return value specified for RemoveNamespaceMember") } var r0 *models.Namespace var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, string, string) (*models.Namespace, error)); ok { - return rf(ctx, tenantID, memberID, userID) + if rf, ok := ret.Get(0).(func(context.Context, *requests.NamespaceRemoveMember) (*models.Namespace, error)); ok { + return rf(ctx, req) } - if rf, ok := ret.Get(0).(func(context.Context, string, string, string) *models.Namespace); ok { - r0 = rf(ctx, tenantID, memberID, userID) + if rf, ok := ret.Get(0).(func(context.Context, *requests.NamespaceRemoveMember) *models.Namespace); ok { + r0 = rf(ctx, req) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*models.Namespace) } } - if rf, ok := ret.Get(1).(func(context.Context, string, string, string) error); ok { - r1 = rf(ctx, tenantID, memberID, userID) + if rf, ok := ret.Get(1).(func(context.Context, *requests.NamespaceRemoveMember) error); ok { + r1 = rf(ctx, req) } else { r1 = ret.Error(1) } @@ -1287,24 +1387,6 @@ func (_m *Service) RemoveNamespaceUser(ctx context.Context, tenantID string, mem return r0, r1 } -// RemovePublicKeyTag provides a mock function with given fields: ctx, tenant, fingerprint, tag -func (_m *Service) RemovePublicKeyTag(ctx context.Context, tenant string, fingerprint string, tag string) error { - ret := _m.Called(ctx, tenant, fingerprint, tag) - - if len(ret) == 0 { - panic("no return value specified for RemovePublicKeyTag") - } - - var r0 error - if rf, ok := 
ret.Get(0).(func(context.Context, string, string, string) error); ok { - r0 = rf(ctx, tenant, fingerprint, tag) - } else { - r0 = ret.Error(0) - } - - return r0 -} - // RenameDevice provides a mock function with given fields: ctx, uid, name, tenant func (_m *Service) RenameDevice(ctx context.Context, uid models.UID, name string, tenant string) error { ret := _m.Called(ctx, uid, name, tenant) @@ -1323,58 +1405,34 @@ func (_m *Service) RenameDevice(ctx context.Context, uid models.UID, name string return r0 } -// RenameTag provides a mock function with given fields: ctx, tenant, oldTag, newTag -func (_m *Service) RenameTag(ctx context.Context, tenant string, oldTag string, newTag string) error { - ret := _m.Called(ctx, tenant, oldTag, newTag) +// ResolveDevice provides a mock function with given fields: ctx, req +func (_m *Service) ResolveDevice(ctx context.Context, req *requests.ResolveDevice) (*models.Device, error) { + ret := _m.Called(ctx, req) if len(ret) == 0 { - panic("no return value specified for RenameTag") + panic("no return value specified for ResolveDevice") } - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, string, string) error); ok { - r0 = rf(ctx, tenant, oldTag, newTag) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// SetDevicePosition provides a mock function with given fields: ctx, uid, ip -func (_m *Service) SetDevicePosition(ctx context.Context, uid models.UID, ip string) error { - ret := _m.Called(ctx, uid, ip) - - if len(ret) == 0 { - panic("no return value specified for SetDevicePosition") + var r0 *models.Device + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *requests.ResolveDevice) (*models.Device, error)); ok { + return rf(ctx, req) } - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, models.UID, string) error); ok { - r0 = rf(ctx, uid, ip) + if rf, ok := ret.Get(0).(func(context.Context, *requests.ResolveDevice) *models.Device); ok { + r0 = rf(ctx, req) } else { - r0 = 
ret.Error(0) - } - - return r0 -} - -// SetSessionAuthenticated provides a mock function with given fields: ctx, uid, authenticated -func (_m *Service) SetSessionAuthenticated(ctx context.Context, uid models.UID, authenticated bool) error { - ret := _m.Called(ctx, uid, authenticated) - - if len(ret) == 0 { - panic("no return value specified for SetSessionAuthenticated") + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Device) + } } - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, models.UID, bool) error); ok { - r0 = rf(ctx, uid, authenticated) + if rf, ok := ret.Get(1).(func(context.Context, *requests.ResolveDevice) error); ok { + r1 = rf(ctx, req) } else { - r0 = ret.Error(0) + r1 = ret.Error(1) } - return r0 + return r0, r1 } // Setup provides a mock function with given fields: ctx, req @@ -1395,68 +1453,45 @@ func (_m *Service) Setup(ctx context.Context, req requests.Setup) error { return r0 } -// SystemDownloadInstallScript provides a mock function with given fields: ctx, req -func (_m *Service) SystemDownloadInstallScript(ctx context.Context, req requests.SystemInstallScript) (*template.Template, map[string]interface{}, error) { - ret := _m.Called(ctx, req) +// SetupVerify provides a mock function with given fields: ctx, sign +func (_m *Service) SetupVerify(ctx context.Context, sign string) error { + ret := _m.Called(ctx, sign) if len(ret) == 0 { - panic("no return value specified for SystemDownloadInstallScript") + panic("no return value specified for SetupVerify") } - var r0 *template.Template - var r1 map[string]interface{} - var r2 error - if rf, ok := ret.Get(0).(func(context.Context, requests.SystemInstallScript) (*template.Template, map[string]interface{}, error)); ok { - return rf(ctx, req) - } - if rf, ok := ret.Get(0).(func(context.Context, requests.SystemInstallScript) *template.Template); ok { - r0 = rf(ctx, req) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*template.Template) - } - } - - if rf, ok := 
ret.Get(1).(func(context.Context, requests.SystemInstallScript) map[string]interface{}); ok { - r1 = rf(ctx, req) - } else { - if ret.Get(1) != nil { - r1 = ret.Get(1).(map[string]interface{}) - } - } - - if rf, ok := ret.Get(2).(func(context.Context, requests.SystemInstallScript) error); ok { - r2 = rf(ctx, req) + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { + r0 = rf(ctx, sign) } else { - r2 = ret.Error(2) + r0 = ret.Error(0) } - return r0, r1, r2 + return r0 } -// SystemGetInfo provides a mock function with given fields: ctx, req -func (_m *Service) SystemGetInfo(ctx context.Context, req requests.SystemGetInfo) (*models.SystemInfo, error) { - ret := _m.Called(ctx, req) +// SystemDownloadInstallScript provides a mock function with given fields: ctx +func (_m *Service) SystemDownloadInstallScript(ctx context.Context) (string, error) { + ret := _m.Called(ctx) if len(ret) == 0 { - panic("no return value specified for SystemGetInfo") + panic("no return value specified for SystemDownloadInstallScript") } - var r0 *models.SystemInfo + var r0 string var r1 error - if rf, ok := ret.Get(0).(func(context.Context, requests.SystemGetInfo) (*models.SystemInfo, error)); ok { - return rf(ctx, req) + if rf, ok := ret.Get(0).(func(context.Context) (string, error)); ok { + return rf(ctx) } - if rf, ok := ret.Get(0).(func(context.Context, requests.SystemGetInfo) *models.SystemInfo); ok { - r0 = rf(ctx, req) + if rf, ok := ret.Get(0).(func(context.Context) string); ok { + r0 = rf(ctx) } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.SystemInfo) - } + r0 = ret.Get(0).(string) } - if rf, ok := ret.Get(1).(func(context.Context, requests.SystemGetInfo) error); ok { - r1 = rf(ctx, req) + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(ctx) } else { r1 = ret.Error(1) } @@ -1464,47 +1499,35 @@ func (_m *Service) SystemGetInfo(ctx context.Context, req requests.SystemGetInfo return r0, r1 } -// UpdateDataUser provides a 
mock function with given fields: ctx, id, userData -func (_m *Service) UpdateDataUser(ctx context.Context, id string, userData models.UserData) ([]string, error) { - ret := _m.Called(ctx, id, userData) +// UpdateAPIKey provides a mock function with given fields: ctx, req +func (_m *Service) UpdateAPIKey(ctx context.Context, req *requests.UpdateAPIKey) error { + ret := _m.Called(ctx, req) if len(ret) == 0 { - panic("no return value specified for UpdateDataUser") - } - - var r0 []string - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, models.UserData) ([]string, error)); ok { - return rf(ctx, id, userData) - } - if rf, ok := ret.Get(0).(func(context.Context, string, models.UserData) []string); ok { - r0 = rf(ctx, id, userData) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]string) - } + panic("no return value specified for UpdateAPIKey") } - if rf, ok := ret.Get(1).(func(context.Context, string, models.UserData) error); ok { - r1 = rf(ctx, id, userData) + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *requests.UpdateAPIKey) error); ok { + r0 = rf(ctx, req) } else { - r1 = ret.Error(1) + r0 = ret.Error(0) } - return r0, r1 + return r0 } -// UpdateDevice provides a mock function with given fields: ctx, tenant, uid, name, publicURL -func (_m *Service) UpdateDevice(ctx context.Context, tenant string, uid models.UID, name *string, publicURL *bool) error { - ret := _m.Called(ctx, tenant, uid, name, publicURL) +// UpdateDevice provides a mock function with given fields: ctx, req +func (_m *Service) UpdateDevice(ctx context.Context, req *requests.DeviceUpdate) error { + ret := _m.Called(ctx, req) if len(ret) == 0 { panic("no return value specified for UpdateDevice") } var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, models.UID, *string, *bool) error); ok { - r0 = rf(ctx, tenant, uid, name, publicURL) + if rf, ok := ret.Get(0).(func(context.Context, *requests.DeviceUpdate) error); ok { + r0 = rf(ctx, req) } 
else { r0 = ret.Error(0) } @@ -1512,17 +1535,17 @@ func (_m *Service) UpdateDevice(ctx context.Context, tenant string, uid models.U return r0 } -// UpdateDeviceStatus provides a mock function with given fields: ctx, tenant, uid, status -func (_m *Service) UpdateDeviceStatus(ctx context.Context, tenant string, uid models.UID, status models.DeviceStatus) error { - ret := _m.Called(ctx, tenant, uid, status) +// UpdateDeviceStatus provides a mock function with given fields: ctx, req +func (_m *Service) UpdateDeviceStatus(ctx context.Context, req *requests.DeviceUpdateStatus) error { + ret := _m.Called(ctx, req) if len(ret) == 0 { panic("no return value specified for UpdateDeviceStatus") } var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, models.UID, models.DeviceStatus) error); ok { - r0 = rf(ctx, tenant, uid, status) + if rf, ok := ret.Get(0).(func(context.Context, *requests.DeviceUpdateStatus) error); ok { + r0 = rf(ctx, req) } else { r0 = ret.Error(0) } @@ -1530,17 +1553,17 @@ func (_m *Service) UpdateDeviceStatus(ctx context.Context, tenant string, uid mo return r0 } -// UpdateDeviceTag provides a mock function with given fields: ctx, uid, tags -func (_m *Service) UpdateDeviceTag(ctx context.Context, uid models.UID, tags []string) error { - ret := _m.Called(ctx, uid, tags) +// UpdateNamespaceMember provides a mock function with given fields: ctx, req +func (_m *Service) UpdateNamespaceMember(ctx context.Context, req *requests.NamespaceUpdateMember) error { + ret := _m.Called(ctx, req) if len(ret) == 0 { - panic("no return value specified for UpdateDeviceTag") + panic("no return value specified for UpdateNamespaceMember") } var r0 error - if rf, ok := ret.Get(0).(func(context.Context, models.UID, []string) error); ok { - r0 = rf(ctx, uid, tags) + if rf, ok := ret.Get(0).(func(context.Context, *requests.NamespaceUpdateMember) error); ok { + r0 = rf(ctx, req) } else { r0 = ret.Error(0) } @@ -1596,17 +1619,17 @@ func (_m *Service) 
UpdatePublicKey(ctx context.Context, fingerprint string, tena return r0, r1 } -// UpdatePublicKeyTags provides a mock function with given fields: ctx, tenant, fingerprint, tags -func (_m *Service) UpdatePublicKeyTags(ctx context.Context, tenant string, fingerprint string, tags []string) error { - ret := _m.Called(ctx, tenant, fingerprint, tags) +// UpdateSession provides a mock function with given fields: ctx, uid, model +func (_m *Service) UpdateSession(ctx context.Context, uid models.UID, model models.SessionUpdate) error { + ret := _m.Called(ctx, uid, model) if len(ret) == 0 { - panic("no return value specified for UpdatePublicKeyTags") + panic("no return value specified for UpdateSession") } var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, string, []string) error); ok { - r0 = rf(ctx, tenant, fingerprint, tags) + if rf, ok := ret.Get(0).(func(context.Context, models.UID, models.SessionUpdate) error); ok { + r0 = rf(ctx, uid, model) } else { r0 = ret.Error(0) } @@ -1614,6 +1637,66 @@ func (_m *Service) UpdatePublicKeyTags(ctx context.Context, tenant string, finge return r0 } +// UpdateTag provides a mock function with given fields: ctx, req +func (_m *Service) UpdateTag(ctx context.Context, req *requests.UpdateTag) ([]string, error) { + ret := _m.Called(ctx, req) + + if len(ret) == 0 { + panic("no return value specified for UpdateTag") + } + + var r0 []string + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *requests.UpdateTag) ([]string, error)); ok { + return rf(ctx, req) + } + if rf, ok := ret.Get(0).(func(context.Context, *requests.UpdateTag) []string); ok { + r0 = rf(ctx, req) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]string) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *requests.UpdateTag) error); ok { + r1 = rf(ctx, req) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UpdateUser provides a mock function with given fields: ctx, req +func (_m *Service) UpdateUser(ctx 
context.Context, req *requests.UpdateUser) ([]string, error) { + ret := _m.Called(ctx, req) + + if len(ret) == 0 { + panic("no return value specified for UpdateUser") + } + + var r0 []string + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *requests.UpdateUser) ([]string, error)); ok { + return rf(ctx, req) + } + if rf, ok := ret.Get(0).(func(context.Context, *requests.UpdateUser) []string); ok { + r0 = rf(ctx, req) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]string) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *requests.UpdateUser) error); ok { + r1 = rf(ctx, req) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // NewService creates a new instance of Service. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewService(t interface { diff --git a/api/services/namespace.go b/api/services/namespace.go index 875f6020642..a040de29759 100644 --- a/api/services/namespace.go +++ b/api/services/namespace.go @@ -2,87 +2,86 @@ package services import ( "context" + "errors" "strings" - "github.com/shellhub-io/shellhub/api/pkg/guard" "github.com/shellhub-io/shellhub/api/store" - req "github.com/shellhub-io/shellhub/pkg/api/internalclient" - "github.com/shellhub-io/shellhub/pkg/api/paginator" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" "github.com/shellhub-io/shellhub/pkg/api/requests" + "github.com/shellhub-io/shellhub/pkg/clock" "github.com/shellhub-io/shellhub/pkg/envs" "github.com/shellhub-io/shellhub/pkg/models" "github.com/shellhub-io/shellhub/pkg/uuid" ) type NamespaceService interface { - ListNamespaces(ctx context.Context, pagination paginator.Query, filter []models.Filter, export bool) ([]models.Namespace, int, error) - CreateNamespace(ctx context.Context, namespace requests.NamespaceCreate, userID string) (*models.Namespace, error) + ListNamespaces(ctx context.Context, req 
*requests.NamespaceList) ([]models.Namespace, int, error) + CreateNamespace(ctx context.Context, namespace *requests.NamespaceCreate) (*models.Namespace, error) GetNamespace(ctx context.Context, tenantID string) (*models.Namespace, error) DeleteNamespace(ctx context.Context, tenantID string) error - EditNamespace(ctx context.Context, tenantID, name string) (*models.Namespace, error) - AddNamespaceUser(ctx context.Context, memberUsername, memberRole, tenantID, userID string) (*models.Namespace, error) - RemoveNamespaceUser(ctx context.Context, tenantID, memberID, userID string) (*models.Namespace, error) - EditNamespaceUser(ctx context.Context, tenantID, userID, memberID, memberNewRole string) error EditSessionRecordStatus(ctx context.Context, sessionRecord bool, tenantID string) error GetSessionRecord(ctx context.Context, tenantID string) (bool, error) } -// ListNamespaces lists selected namespaces from a user. -// -// It receives a context, used to "control" the request flow, a pagination query, that indicate how many registers are -// requested per page, a filter string, a base64 encoded value what is converted to a slice of models.Filter and an -// export flag. -// -// ListNamespaces returns a slice of models.Namespace, the total of namespaces and an error. When error is not nil, the -// slice of models.Namespace is nil, total is zero. -func (s *service) ListNamespaces(ctx context.Context, pagination paginator.Query, filter []models.Filter, export bool) ([]models.Namespace, int, error) { - namespaces, count, err := s.store.NamespaceList(ctx, pagination, filter, export) - if err != nil { - return nil, 0, NewErrNamespaceList(err) - } - - for index, namespace := range namespaces { - members, err := s.fillMembersData(ctx, namespace.Members) - if err != nil { - return nil, 0, NewErrNamespaceMemberFillData(err) - } - - namespaces[index].Members = members - } - - return namespaces, count, nil -} - // CreateNamespace creates a new namespace. 
-func (s *service) CreateNamespace(ctx context.Context, namespace requests.NamespaceCreate, userID string) (*models.Namespace, error) { - user, _, err := s.store.UserGetByID(ctx, userID, false) +func (s *service) CreateNamespace(ctx context.Context, req *requests.NamespaceCreate) (*models.Namespace, error) { + user, err := s.store.UserResolve(ctx, store.UserIDResolver, req.UserID) if err != nil || user == nil { - return nil, NewErrUserNotFound(userID, err) + return nil, NewErrUserNotFound(req.UserID, err) + } + + // When MaxNamespaces is less than zero, it means that the user has no limit + // of namespaces. If the value is zero, it means he has no right to create a new namespace + if user.MaxNamespaces == 0 { + return nil, NewErrNamespaceCreationIsForbidden(user.MaxNamespaces, nil) + } else if user.MaxNamespaces > 0 { + info, err := s.store.UserGetInfo(ctx, req.UserID) + switch { + case err != nil: + return nil, err + case len(info.OwnedNamespaces) >= user.MaxNamespaces: + return nil, NewErrNamespaceLimitReached(user.MaxNamespaces, nil) + } } - // When MaxNamespaces is less than zero, it means that the user has no limit of namespaces. 
- if user.MaxNamespaces > 0 && user.MaxNamespaces <= user.Namespaces { - return nil, NewErrNamespaceLimitReached(user.MaxNamespaces, nil) + conflictsTarget := &models.NamespaceConflicts{Name: strings.ToLower(req.Name)} + if _, has, err := s.store.NamespaceConflicts(ctx, conflictsTarget); has || err != nil { + return nil, NewErrNamespaceDuplicated(err) } ns := &models.Namespace{ - Name: strings.ToLower(namespace.Name), - Owner: user.ID, + Name: strings.ToLower(req.Name), + Owner: user.ID, + DevicesAcceptedCount: 0, + DevicesPendingCount: 0, + DevicesRejectedCount: 0, + DevicesRemovedCount: 0, Members: []models.Member{ { - ID: user.ID, - Role: guard.RoleOwner, + ID: user.ID, + Role: authorizer.RoleOwner, + AddedAt: clock.Now(), }, }, - Settings: &models.NamespaceSettings{SessionRecord: true}, - TenantID: namespace.TenantID, + Settings: &models.NamespaceSettings{ + SessionRecord: true, + ConnectionAnnouncement: "", + }, + TenantID: req.TenantID, + Type: models.NewDefaultType(), } - if ok, err := s.validator.Struct(ns); !ok || err != nil { - return nil, NewErrNamespaceInvalid(err) + if envs.IsCommunity() { + ns.Settings.ConnectionAnnouncement = models.DefaultAnnouncementMessage } - if namespace.TenantID == "" { + if models.IsTypeTeam(req.Type) { + ns.Type = models.TypeTeam + } else if models.IsTypePersonal(req.Type) { + ns.Type = models.TypePersonal + } + + if req.TenantID == "" { ns.TenantID = uuid.Generate() } @@ -95,15 +94,6 @@ func (s *service) CreateNamespace(ctx context.Context, namespace requests.Namesp ns.MaxDevices = -1 } - otherNamespace, err := s.store.NamespaceGetByName(ctx, ns.Name) - if err != nil && err != store.ErrNoDocuments { - return nil, NewErrNamespaceNotFound(ns.Name, err) - } - - if otherNamespace != nil { - return nil, NewErrNamespaceDuplicated(nil) - } - if _, err := s.store.NamespaceCreate(ctx, ns); err != nil { return nil, NewErrNamespaceCreateStore(err) } @@ -111,24 +101,26 @@ func (s *service) CreateNamespace(ctx context.Context, namespace 
requests.Namesp return ns, nil } +func (s *service) ListNamespaces(ctx context.Context, req *requests.NamespaceList) ([]models.Namespace, int, error) { + namespaces, count, err := s.store.NamespaceList(ctx, s.store.Options().Match(&req.Filters), s.store.Options().Paginate(&req.Paginator)) + if err != nil { + return nil, 0, NewErrNamespaceList(err) + } + + return namespaces, count, nil +} + // GetNamespace gets a namespace. // // It receives a context, used to "control" the request flow and the tenant ID from models.Namespace. // // GetNamespace returns a models.Namespace and an error. When error is not nil, the models.Namespace is nil. func (s *service) GetNamespace(ctx context.Context, tenantID string) (*models.Namespace, error) { - namespace, err := s.store.NamespaceGet(ctx, tenantID) + namespace, err := s.store.NamespaceResolve(ctx, store.NamespaceTenantIDResolver, tenantID) if err != nil || namespace == nil { return nil, NewErrNamespaceNotFound(tenantID, err) } - members, err := s.fillMembersData(ctx, namespace.Members) - if err != nil { - return nil, NewErrNamespaceMemberFillData(err) - } - - namespace.Members = members - return namespace, nil } @@ -139,7 +131,7 @@ func (s *service) GetNamespace(ctx context.Context, tenantID string) (*models.Na // When cloud and billing is enabled, it will try to delete the namespace's billing information from the billing // service if it exists. 
func (s *service) DeleteNamespace(ctx context.Context, tenantID string) error { - ns, err := s.store.NamespaceGet(ctx, tenantID) + n, err := s.store.NamespaceResolve(ctx, store.NamespaceTenantIDResolver, tenantID) if err != nil { return NewErrNamespaceNotFound(tenantID, err) } @@ -148,230 +140,65 @@ func (s *service) DeleteNamespace(ctx context.Context, tenantID string) error { return !ns.Billing.IsNil() && ns.Billing.HasCutomer() && ns.Billing.HasSubscription() } - if envs.IsCloud() && envs.HasBilling() && ableToReportDeleteNamespace(ns) { - if err := s.BillingReport(s.client.(req.Client), tenantID, ReportNamespaceDelete); err != nil { + if envs.IsCloud() && ableToReportDeleteNamespace(n) { + if err := s.BillingReport(ctx, s.client, tenantID, ReportNamespaceDelete); err != nil { return NewErrBillingReportNamespaceDelete(err) } } - return s.store.NamespaceDelete(ctx, tenantID) + return s.store.NamespaceDelete(ctx, n) } -// fillMembersData fill the member data with the user data. -// -// This method exist because the namespace stores only the user ID and the role from its member as a list of models.Member. -// To avoid unnecessary calls to store for member information, member username, this "conversion" is ony made when -// required by the service. -// -// It receives a context, used to "control" the request flow and a slice of models.Member with just ID and return an -// other slice with ID, username and role set. -// -// fillMembersData returns a slice of models.Member and an error. When error is not nil, the slice of models.Member is nil. 
-func (s *service) fillMembersData(ctx context.Context, members []models.Member) ([]models.Member, error) { - for index, member := range members { - user, _, err := s.store.UserGetByID(ctx, member.ID, false) - if err != nil || user == nil { - return nil, NewErrUserNotFound(member.ID, err) - } - - members[index] = models.Member{ID: user.ID, Username: user.Username, Role: member.Role} - } - - return members, nil -} - -// EditNamespace edits the namespace name. -// -// It receives a context, used to "control" the request flow, tenant ID from models.Namespace and the new name to -// namespace. Name is set to lowercase. -// -// EditNamespace returns a models.Namespace and an error. When error is not nil, the models.Namespace is nil. -func (s *service) EditNamespace(ctx context.Context, tenantID, name string) (*models.Namespace, error) { - namespace, err := s.store.NamespaceGet(ctx, tenantID) +func (s *service) EditNamespace(ctx context.Context, req *requests.NamespaceEdit) (*models.Namespace, error) { + namespace, err := s.store.NamespaceResolve(ctx, store.NamespaceTenantIDResolver, req.Tenant) if err != nil { - return nil, NewErrNamespaceNotFound(tenantID, err) + return nil, NewErrNamespaceNotFound(req.Tenant, err) } - name = strings.ToLower(name) - if ok, err := s.validator.Struct(&models.Namespace{Name: name}); !ok || err != nil { - return nil, NewErrNamespaceInvalid(err) + if req.Name != "" && !strings.EqualFold(req.Name, namespace.Name) { + namespace.Name = strings.ToLower(req.Name) } - if namespace.Name == name { - return nil, NewErrNamespaceDuplicated(nil) + if req.Settings.SessionRecord != nil { + namespace.Settings.SessionRecord = *req.Settings.SessionRecord } - return s.store.NamespaceRename(ctx, namespace.TenantID, name) -} - -// AddNamespaceUser adds a member to a namespace. 
-// -// It receives a context, used to "control" the request flow, the member's name, the member's role, the tenant ID from -// models.Namespace what receive the member and the user ID from models.User who is adding the new member. -// -// If user from user's ID has a role what does not allow to add a new member or the member's role is the same as the user -// one, AddNamespaceUser will return error. -// -// AddNamespaceUser returns a models.Namespace and an error. When error is not nil, the models.Namespace is nil. -func (s *service) AddNamespaceUser(ctx context.Context, memberUsername, memberRole, tenantID, userID string) (*models.Namespace, error) { - if ok, err := s.validator.Struct(models.Member{Username: memberUsername, Role: memberRole}); !ok || err != nil { - return nil, NewErrNamespaceMemberInvalid(err) - } - - namespace, err := s.store.NamespaceGet(ctx, tenantID) - if err != nil || namespace == nil { - return nil, NewErrNamespaceNotFound(tenantID, err) - } - - // user is the user who is adding the new member. - user, _, err := s.store.UserGetByID(ctx, userID, false) - if err != nil || user == nil { - return nil, NewErrUserNotFound(userID, err) + if req.Settings.ConnectionAnnouncement != nil { + namespace.Settings.ConnectionAnnouncement = *req.Settings.ConnectionAnnouncement } - // checks if the active member is in the namespace. user is the active member. - active, ok := namespace.FindMember(user.ID) - if !ok { - return nil, NewErrNamespaceMemberNotFound(user.ID, err) - } - - passive, err := s.store.UserGetByUsername(ctx, memberUsername) - if err != nil { - return nil, NewErrUserNotFound(memberUsername, err) - } - - // checks if the passive member is in the namespace. 
- if _, ok = namespace.FindMember(passive.ID); ok { - return nil, NewErrNamespaceMemberDuplicated(passive.ID, nil) - } - - if !guard.CheckRole(active.Role, memberRole) { - return nil, guard.ErrForbidden - } - - return s.store.NamespaceAddMember(ctx, tenantID, passive.ID, memberRole) -} - -// RemoveNamespaceUser removes member from a namespace. -// -// It receives a context, used to "control" the request flow, the tenant ID from models.Namespace, member ID to remove -// and the user ID from models.User who is removing the member. -// -// If user from user's ID has a role what does not allow to remove a member or the member's role is the same as the user -// one, RemoveNamespaceUser will return error. -// -// RemoveNamespaceUser returns a models.Namespace and an error. When error is not nil, the models.Namespace is nil. -func (s *service) RemoveNamespaceUser(ctx context.Context, tenantID, memberID, userID string) (*models.Namespace, error) { - namespace, err := s.store.NamespaceGet(ctx, tenantID) - if err != nil { - return nil, NewErrNamespaceNotFound(tenantID, err) - } - - // checks if the user exist. - // user is the user who is removing the member. - user, _, err := s.store.UserGetByID(ctx, userID, false) - if err != nil { - return nil, NewErrUserNotFound(userID, err) - } - - // checks if the member exist. - // member is the member who will be removed. - member, _, err := s.store.UserGetByID(ctx, memberID, false) - if err != nil { - return nil, NewErrUserNotFound(memberID, err) - } - - // checks if the active member is in the namespace. user is the active member. - active, ok := namespace.FindMember(user.ID) - if !ok { - return nil, NewErrNamespaceMemberNotFound(user.ID, err) - } - - // checks if the passive member is in the namespace. member is the passive member. - passive, ok := namespace.FindMember(member.ID) - if !ok { - return nil, NewErrNamespaceMemberNotFound(member.ID, err) - } - - // checks if the active member can act over the passive member. 
- if !guard.CheckRole(active.Role, passive.Role) { - return nil, guard.ErrForbidden - } - - removed, err := s.store.NamespaceRemoveMember(ctx, tenantID, member.ID) - if err != nil { + if err := s.store.NamespaceUpdate(ctx, namespace); err != nil { return nil, err } - s.AuthUncacheToken(ctx, namespace.TenantID, member.ID) // nolint: errcheck - - return removed, nil + return s.store.NamespaceResolve(ctx, store.NamespaceTenantIDResolver, req.Tenant) } -// EditNamespaceUser edits a member's role. +// EditSessionRecordStatus defines if the sessions will be recorded. // -// It receives a context, used to "control" the request flow, the tenant ID from models.Namespace, user ID from -// models.User who is editing the member and the member's new role. +// It receives a context, used to "control" the request flow, a boolean to define if the sessions will be recorded and +// the tenant ID from models.Namespace. // -// If user from user's ID has a role what does not allow to edit a member or the member's role is the same as the user -// one, EditNamespaceUser will return error. -func (s *service) EditNamespaceUser(ctx context.Context, tenantID, userID, memberID, memberNewRole string) error { - namespace, err := s.store.NamespaceGet(ctx, tenantID) - if err != nil { - return NewErrNamespaceNotFound(tenantID, err) - } - - // user is the user who is editing the member. - user, _, err := s.store.UserGetByID(ctx, userID, false) - if err != nil { - return NewErrUserNotFound(userID, err) - } - - // member is the member who will be edited. - member, _, err := s.store.UserGetByID(ctx, memberID, false) +// This method is deprecated, use [NamespaceService#EditNamespace] instead. +func (s *service) EditSessionRecordStatus(ctx context.Context, sessionRecord bool, tenantID string) error { + n, err := s.store.NamespaceResolve(ctx, store.NamespaceTenantIDResolver, tenantID) if err != nil { - return NewErrUserNotFound(memberID, err) - } - - // checks if the active member is in the namespace. 
user is the active member. - active, ok := namespace.FindMember(user.ID) - if !ok { - return NewErrNamespaceMemberNotFound(user.ID, err) - } - - // checks if the passive member is in the namespace. member is the passive member. - passive, ok := namespace.FindMember(member.ID) - if !ok { - return NewErrNamespaceMemberNotFound(member.ID, err) - } - - // Blocks if the active member's role is equal to the passive one. - if passive.Role == active.Role { - return guard.ErrForbidden - } - - // checks if the active member can act over the passive member. - if !guard.CheckRole(active.Role, memberNewRole) { - return guard.ErrForbidden + switch { + case errors.Is(err, store.ErrNoDocuments): + return NewErrNamespaceNotFound(tenantID, err) + default: + return err + } } - if err := s.store.NamespaceEditMember(ctx, tenantID, member.ID, memberNewRole); err != nil { + n.Settings.SessionRecord = sessionRecord + if err := s.store.NamespaceUpdate(ctx, n); err != nil { // nolint:revive return err } - s.AuthUncacheToken(ctx, namespace.TenantID, member.ID) // nolint: errcheck - return nil } -// EditSessionRecordStatus defines if the sessions will be recorded. -// -// It receives a context, used to "control" the request flow, a boolean to define if the sessions will be recorded and -// the tenant ID from models.Namespace. -func (s *service) EditSessionRecordStatus(ctx context.Context, sessionRecord bool, tenantID string) error { - return s.store.NamespaceSetSessionRecord(ctx, sessionRecord, tenantID) -} - // GetSessionRecord gets the session record data. // // It receives a context, used to "control" the request flow, the tenant ID from models.Namespace. @@ -379,9 +206,10 @@ func (s *service) EditSessionRecordStatus(ctx context.Context, sessionRecord boo // GetSessionRecord returns a boolean indicating the session record status and an error. When error is not nil, // the boolean is false. 
func (s *service) GetSessionRecord(ctx context.Context, tenantID string) (bool, error) { - if _, err := s.store.NamespaceGet(ctx, tenantID); err != nil { + n, err := s.store.NamespaceResolve(ctx, store.NamespaceTenantIDResolver, tenantID) + if err != nil { return false, NewErrNamespaceNotFound(tenantID, err) } - return s.store.NamespaceGetSessionRecord(ctx, tenantID) + return n.Settings.SessionRecord, nil } diff --git a/api/services/namespace_test.go b/api/services/namespace_test.go index 12f643b95aa..1af67dae757 100644 --- a/api/services/namespace_test.go +++ b/api/services/namespace_test.go @@ -3,25 +3,30 @@ package services import ( "context" "errors" - "strconv" - "strings" "testing" + "time" - "github.com/shellhub-io/shellhub/api/pkg/guard" "github.com/shellhub-io/shellhub/api/store" - "github.com/shellhub-io/shellhub/api/store/mocks" - "github.com/shellhub-io/shellhub/pkg/api/paginator" + storemock "github.com/shellhub-io/shellhub/api/store/mocks" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" + "github.com/shellhub-io/shellhub/pkg/api/query" "github.com/shellhub-io/shellhub/pkg/api/requests" storecache "github.com/shellhub-io/shellhub/pkg/cache" + "github.com/shellhub-io/shellhub/pkg/clock" + clockmock "github.com/shellhub-io/shellhub/pkg/clock/mocks" + "github.com/shellhub-io/shellhub/pkg/envs" + envmock "github.com/shellhub-io/shellhub/pkg/envs/mocks" "github.com/shellhub-io/shellhub/pkg/models" "github.com/shellhub-io/shellhub/pkg/uuid" - uuid_mocks "github.com/shellhub-io/shellhub/pkg/uuid/mocks" - "github.com/shellhub-io/shellhub/pkg/validator" + uuidmocks "github.com/shellhub-io/shellhub/pkg/uuid/mocks" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" ) func TestListNamespaces(t *testing.T) { - mock := new(mocks.Store) + storeMock := new(storemock.Store) + queryOptionsMock := new(storemock.QueryOptions) + storeMock.On("Options").Return(queryOptionsMock) ctx := context.TODO() @@ -33,17 +38,31 @@ func TestListNamespaces(t 
*testing.T) { cases := []struct { description string - pagination paginator.Query + req *requests.NamespaceList ctx context.Context requiredMocks func() expected Expected }{ { description: "fail when could not get the namespace list", - pagination: paginator.Query{Page: 1, PerPage: 10}, - ctx: ctx, + req: &requests.NamespaceList{ + Paginator: query.Paginator{Page: 1, PerPage: 10}, + Filters: query.Filters{}, + }, + ctx: ctx, requiredMocks: func() { - mock.On("NamespaceList", ctx, paginator.Query{Page: 1, PerPage: 10}, []models.Filter(nil), false).Return(nil, 0, errors.New("error")).Once() + queryOptionsMock. + On("Match", &query.Filters{}). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 1, PerPage: 10}). + Return(nil). + Once() + storeMock. + On("NamespaceList", ctx, mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return(nil, 0, errors.New("error")). + Once() }, expected: Expected{ namespaces: nil, @@ -52,173 +71,154 @@ func TestListNamespaces(t *testing.T) { }, }, { - description: "fail when could not get a user", - pagination: paginator.Query{Page: 1, PerPage: 10}, - ctx: ctx, + description: "success to get the namespace list", + req: &requests.NamespaceList{ + Paginator: query.Paginator{Page: 1, PerPage: 10}, + Filters: query.Filters{}, + }, + ctx: ctx, requiredMocks: func() { - namespaces := []models.Namespace{ - { - Name: "group1", - Owner: "hash", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - Members: []models.Member{ + queryOptionsMock. + On("Match", &query.Filters{}). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 1, PerPage: 10}). + Return(nil). + Once() + storeMock. + On("NamespaceList", ctx, mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). 
+ Return( + []models.Namespace{ { - ID: "hash", - Role: guard.RoleOwner, + Name: "group1", + Owner: "66ffe0745a82ba5c4fe842ac", + TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", + Type: models.TypeTeam, + Members: []models.Member{ + { + ID: "66ffe0745a82ba5c4fe842ac", + Role: authorizer.RoleOwner, + Email: "john.doe@test.com", + }, + }, }, - }, - }, - { - Name: "group2", - Owner: "hash", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf48471i4", - Members: []models.Member{ { - ID: "hash", - Role: guard.RoleOwner, + Name: "group2", + Owner: "66ffe0745a82ba5c4fe842ac", + TenantID: "a736a52b-5777-4f92-b0b8-e359bf48471i4", + Type: models.TypeTeam, + Members: []models.Member{ + { + ID: "66ffe0745a82ba5c4fe842ac", + Role: authorizer.RoleOwner, + Email: "john.doe@test.com", + }, + { + ID: "66ffe0232da6d319c9769afb", + Role: authorizer.RoleObserver, + Email: "jane.smith@test.com", + }, + }, }, { - ID: "hash2", - Role: guard.RoleObserver, + Name: "group3", + Owner: "66ffe0745a82ba5c4fe842ac", + TenantID: "a736a52b-5777-4f92-b0b8-e359bf48471i4", + Type: models.TypeTeam, + Members: []models.Member{ + { + ID: "66ffe0745a82ba5c4fe842ac", + Role: authorizer.RoleOwner, + Email: "john.doe@test.com", + }, + { + ID: "66ffe0232da6d319c9769afb", + Role: authorizer.RoleObserver, + Email: "jane.smith@test.com", + }, + }, }, }, - }, - } - - mock.On("NamespaceList", ctx, paginator.Query{Page: 1, PerPage: 10}, []models.Filter(nil), false).Return(namespaces, len(namespaces), nil).Once() - mock.On("UserGetByID", ctx, "hash", false).Return(nil, 0, errors.New("error")).Once() + 2, + nil, + ). 
+ Once() }, expected: Expected{ - namespaces: nil, - count: 0, - err: NewErrNamespaceMemberFillData(NewErrUserNotFound("hash", errors.New("error"))), - }, - }, - { - description: "success to get the namespace list", - pagination: paginator.Query{Page: 1, PerPage: 10}, - ctx: ctx, - requiredMocks: func() { - namespaces := []models.Namespace{ + namespaces: []models.Namespace{ { Name: "group1", - Owner: "hash", + Owner: "66ffe0745a82ba5c4fe842ac", TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", + Type: models.TypeTeam, Members: []models.Member{ { - ID: "hash", - Role: guard.RoleOwner, + ID: "66ffe0745a82ba5c4fe842ac", + Role: authorizer.RoleOwner, + Email: "john.doe@test.com", }, }, }, { Name: "group2", - Owner: "hash", + Owner: "66ffe0745a82ba5c4fe842ac", TenantID: "a736a52b-5777-4f92-b0b8-e359bf48471i4", + Type: models.TypeTeam, Members: []models.Member{ { - ID: "hash", - Role: guard.RoleOwner, + ID: "66ffe0745a82ba5c4fe842ac", + Role: authorizer.RoleOwner, + Email: "john.doe@test.com", }, { - ID: "hash2", - Role: guard.RoleObserver, + ID: "66ffe0232da6d319c9769afb", + Role: authorizer.RoleObserver, + Email: "jane.smith@test.com", }, }, }, - } - - user := &models.User{ - UserData: models.UserData{ - Name: "user", - Username: "hash", - }, - ID: "hash", - } - - user1 := &models.User{ - UserData: models.UserData{ - Name: "user2", - Username: "hash2", - }, - ID: "hash2", - } - - // TODO: Add mock to fillMembersData what will replace the three call to UserGetByID. 
- mock.On("NamespaceList", ctx, paginator.Query{Page: 1, PerPage: 10}, []models.Filter(nil), false).Return(namespaces, len(namespaces), nil).Once() - mock.On("UserGetByID", ctx, "hash", false).Return(user, 0, nil).Once() - mock.On("UserGetByID", ctx, "hash2", false).Return(user1, 0, nil).Once() - mock.On("UserGetByID", ctx, "hash", false).Return(user, 0, nil).Once() - }, - expected: Expected{ - namespaces: []models.Namespace{ { - Name: "group1", Owner: "hash", TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", + Name: "group3", + Owner: "66ffe0745a82ba5c4fe842ac", + TenantID: "a736a52b-5777-4f92-b0b8-e359bf48471i4", + Type: models.TypeTeam, Members: []models.Member{ { - ID: "hash", - Username: "hash", - Role: guard.RoleOwner, + ID: "66ffe0745a82ba5c4fe842ac", + Role: authorizer.RoleOwner, + Email: "john.doe@test.com", }, - }, - }, - {Name: "group2", Owner: "hash", TenantID: "a736a52b-5777-4f92-b0b8-e359bf48471i4", Members: []models.Member{ - { - ID: "hash", - Username: "hash", - Role: guard.RoleOwner, - }, - { - ID: "hash2", - Username: "hash2", - Role: guard.RoleObserver, - }, - }}, - }, - count: len([]models.Namespace{ - { - Name: "group1", Owner: "hash", TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - Members: []models.Member{ { - ID: "hash", - Username: "hash", - Role: guard.RoleOwner, + ID: "66ffe0232da6d319c9769afb", + Role: authorizer.RoleObserver, + Email: "jane.smith@test.com", }, }, }, - {Name: "group2", Owner: "hash", TenantID: "a736a52b-5777-4f92-b0b8-e359bf48471i4", Members: []models.Member{ - { - ID: "hash", - Username: "hash", - Role: guard.RoleOwner, - }, - { - ID: "hash2", - Username: "hash2", - Role: guard.RoleObserver, - }, - }}, - }), - err: nil, + }, + count: 2, + err: nil, }, }, } + s := NewService(storeMock, privateKey, publicKey, storecache.NewNullCache(), clientMock) + for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { tc.requiredMocks() - services := NewService(store.Store(mock), privateKey, publicKey, 
storecache.NewNullCache(), clientMock, nil) - nss, count, err := services.ListNamespaces(tc.ctx, tc.pagination, nil, false) + nss, count, err := s.ListNamespaces(tc.ctx, tc.req) assert.Equal(t, tc.expected, Expected{nss, count, err}) }) } - mock.AssertExpectations(t) + storeMock.AssertExpectations(t) } func TestGetNamespace(t *testing.T) { - mock := new(mocks.Store) + storeMock := new(storemock.Store) ctx := context.TODO() @@ -229,190 +229,136 @@ func TestGetNamespace(t *testing.T) { cases := []struct { description string - user *models.User - namespace *models.Namespace - ctx context.Context + tenantID string requiredMocks func() expected Expected }{ { description: "fails when could not get the namespace", - ctx: ctx, - user: &models.User{UserData: models.UserData{Name: "user1", Username: "hash1"}, ID: "hash1"}, - namespace: &models.Namespace{ - Name: "group1", - Owner: "hash1", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - Members: []models.Member{ - { - ID: "hash1", - Username: "hash1", - Role: guard.RoleOwner, - }, - }, - }, + tenantID: "00000000-0000-4000-0000-000000000000", requiredMocks: func() { - namespace := &models.Namespace{ - Name: "group1", - Owner: "hash1", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - Members: []models.Member{ - { - ID: "hash1", - Username: "hash1", - Role: guard.RoleOwner, - }, - }, - } - - mock.On("NamespaceGet", ctx, namespace.TenantID).Return(nil, errors.New("error")).Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(nil, errors.New("error")). 
+ Once() }, expected: Expected{ namespace: nil, - err: NewErrNamespaceNotFound("a736a52b-5777-4f92-b0b8-e359bf484713", errors.New("error")), + err: NewErrNamespaceNotFound("00000000-0000-4000-0000-000000000000", errors.New("error")), }, }, { - description: "succeeds", - ctx: ctx, - namespace: &models.Namespace{ - Name: "group1", - Owner: "hash1", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - Members: []models.Member{ - { - ID: "hash1", - Username: "hash1", - Role: guard.RoleOwner, - }, - }, - }, - user: &models.User{ - UserData: models.UserData{ - Name: "user1", - Username: "hash1", - }, - ID: "hash1", - }, + description: "succeeds - team", + tenantID: "00000000-0000-4000-0000-000000000000", requiredMocks: func() { - namespace := &models.Namespace{ + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return( + &models.Namespace{ + Name: "group1", + Owner: "66ffe21f76d5207a38a056d5", + TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", + Type: models.TypeTeam, + Members: []models.Member{ + { + ID: "66ffe21f76d5207a38a056d5", + Role: authorizer.RoleOwner, + Email: "john.doe@test.com", + }, + }, + }, + nil, + ). 
+ Once() + }, + expected: Expected{ + namespace: &models.Namespace{ Name: "group1", - Owner: "hash1", + Owner: "66ffe21f76d5207a38a056d5", TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", + Type: models.TypeTeam, Members: []models.Member{ { - ID: "hash1", - Username: "hash1", - Role: guard.RoleOwner, + ID: "66ffe21f76d5207a38a056d5", + Role: authorizer.RoleOwner, + Email: "john.doe@test.com", }, }, - } - - user := &models.User{ - UserData: models.UserData{ - Name: "user1", - Username: "hash1", - }, - ID: "hash1", - } - - mock.On("NamespaceGet", ctx, namespace.TenantID).Return(namespace, nil).Once() - mock.On("UserGetByID", ctx, user.ID, false).Return(user, 0, nil).Once() - }, - expected: Expected{ - namespace: &models.Namespace{Name: "group1", Owner: "hash1", TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", Members: []models.Member{{ID: "hash1", Username: "hash1", Role: guard.RoleOwner}}}, - err: nil, - }, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - tc.requiredMocks() - - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) - returnNamespace, err := service.GetNamespace(ctx, tc.namespace.TenantID) - assert.Equal(t, tc.expected, Expected{returnNamespace, err}) - }) - } - - mock.AssertExpectations(t) -} - -func TestSetMemberData(t *testing.T) { - mock := new(mocks.Store) - - ctx := context.TODO() - - type Expected struct { - members []models.Member - err error - } - - cases := []struct { - description string - members []models.Member - requiredMocks func() - expected Expected - }{ - { - description: "fails when user is not found", - members: []models.Member{ - {ID: "hash1", Role: guard.RoleObserver}, - {ID: "hash2", Role: guard.RoleObserver}, - {ID: "hash3", Role: guard.RoleObserver}, - }, - requiredMocks: func() { - mock.On("UserGetByID", ctx, "hash1", false).Return(nil, 0, errors.New("error")).Once() - }, - expected: Expected{ - members: nil, - err: 
NewErrUserNotFound("hash1", errors.New("error")), + }, + err: nil, }, }, { - description: "success to fill member data", - members: []models.Member{ - {ID: "hash1", Role: guard.RoleObserver}, - {ID: "hash2", Role: guard.RoleObserver}, - {ID: "hash3", Role: guard.RoleObserver}, - {ID: "hash4", Role: guard.RoleOwner}, - }, + description: "succeeds - personal (with have changed to team temporarily)", + tenantID: "00000000-0000-4000-0000-000000000000", requiredMocks: func() { - mock.On("UserGetByID", ctx, "hash1", false).Return(&models.User{ID: "hash1", UserData: models.UserData{Username: "username1"}}, 0, nil).Once() - mock.On("UserGetByID", ctx, "hash2", false).Return(&models.User{ID: "hash2", UserData: models.UserData{Username: "username2"}}, 0, nil).Once() - mock.On("UserGetByID", ctx, "hash3", false).Return(&models.User{ID: "hash3", UserData: models.UserData{Username: "username3"}}, 0, nil).Once() - mock.On("UserGetByID", ctx, "hash4", false).Return(&models.User{ID: "hash4", UserData: models.UserData{Username: "username4"}}, 0, nil).Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return( + &models.Namespace{ + Name: "group1", + Owner: "66ffe21f76d5207a38a056d5", + TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", + Members: []models.Member{ + { + ID: "66ffe21f76d5207a38a056d5", + Role: authorizer.RoleOwner, + Email: "john.doe@test.com", + }, + }, + Type: models.TypeTeam, + }, + nil, + ). 
+ Once() }, expected: Expected{ - members: []models.Member{ - {ID: "hash1", Username: "username1", Role: guard.RoleObserver}, - {ID: "hash2", Username: "username2", Role: guard.RoleObserver}, - {ID: "hash3", Username: "username3", Role: guard.RoleObserver}, - {ID: "hash4", Username: "username4", Role: guard.RoleOwner}, + namespace: &models.Namespace{ + Name: "group1", + Owner: "66ffe21f76d5207a38a056d5", + TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", + Members: []models.Member{ + { + ID: "66ffe21f76d5207a38a056d5", + Role: authorizer.RoleOwner, + Email: "john.doe@test.com", + }, + }, + Type: models.TypeTeam, }, err: nil, }, }, } + s := NewService(storeMock, privateKey, publicKey, storecache.NewNullCache(), clientMock) + for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { tc.requiredMocks() - services := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) - members, err := services.fillMembersData(ctx, tc.members) - assert.Equal(t, tc.expected, Expected{members, err}) + ns, err := s.GetNamespace(ctx, tc.tenantID) + assert.Equal(t, tc.expected, Expected{ns, err}) }) } + + storeMock.AssertExpectations(t) } func TestCreateNamespace(t *testing.T) { - mock := new(mocks.Store) + envMock := new(envmock.Backend) + storeMock := new(storemock.Store) + clockMock := new(clockmock.Clock) + + envs.DefaultBackend = envMock + clock.DefaultBackend = clockMock + + now := time.Now() + clockMock.On("Now").Return(now) ctx := context.TODO() - uuidMock := &uuid_mocks.Uuid{} + uuidMock := &uuidmocks.Uuid{} uuid.DefaultBackend = uuidMock type Expected struct { @@ -423,323 +369,566 @@ func TestCreateNamespace(t *testing.T) { cases := []struct { description string requiredMocks func() - ownerID string - namespace requests.NamespaceCreate + req *requests.NamespaceCreate expected Expected }{ { description: "fails when store user get has no documents", - ownerID: "hash1", - namespace: requests.NamespaceCreate{ + req: 
&requests.NamespaceCreate{ + UserID: "000000000000000000000000", Name: "namespace", - TenantID: "xxxxx", + TenantID: "00000000-0000-4000-0000-000000000000", }, requiredMocks: func() { - user := &models.User{ - UserData: models.UserData{ - Name: "user1", - Username: "hash1", - }, - ID: "hash1", - } - - mock.On("UserGetByID", ctx, user.ID, false).Return(nil, 0, store.ErrNoDocuments).Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(nil, store.ErrNoDocuments). + Once() }, expected: Expected{ - nil, - NewErrUserNotFound("hash1", store.ErrNoDocuments), + ns: nil, + err: NewErrUserNotFound("000000000000000000000000", store.ErrNoDocuments), }, }, { - description: "fails when store user get fails", - ownerID: "hash1", - namespace: requests.NamespaceCreate{ + description: "fails when user reachs the zero namespaces", + req: &requests.NamespaceCreate{ + UserID: "000000000000000000000000", Name: "namespace", - TenantID: "xxxxx", + TenantID: "00000000-0000-4000-0000-000000000000", }, requiredMocks: func() { - user := &models.User{ - UserData: models.UserData{ - Name: "user1", - Username: "hash1", - }, - ID: "hash1", - } - - mock.On("UserGetByID", ctx, user.ID, false).Return(user, 0, errors.New("error")).Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(&models.User{ + ID: "000000000000000000000000", + MaxNamespaces: 0, + }, nil). 
+ Once() }, expected: Expected{ - nil, - NewErrUserNotFound("hash1", errors.New("error")), + ns: nil, + err: NewErrNamespaceCreationIsForbidden(0, nil), }, }, { - description: "fails when a namespace field is invalid", - ownerID: "hash1", - namespace: requests.NamespaceCreate{ - Name: "name.with.dot", + description: "fails when user reachs the max namespaces", + req: &requests.NamespaceCreate{ + UserID: "000000000000000000000000", + Name: "namespace", + TenantID: "00000000-0000-4000-0000-000000000000", }, requiredMocks: func() { - user := &models.User{ - UserData: models.UserData{ - Name: "user1", - Username: "hash1", - }, - ID: "hash1", - } - - mock.On("UserGetByID", ctx, user.ID, false).Return(user, 0, nil).Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(&models.User{ + ID: "000000000000000000000000", + MaxNamespaces: 1, + }, nil). + Once() + storeMock. + On("UserGetInfo", ctx, "000000000000000000000000"). + Return( + &models.UserInfo{ + OwnedNamespaces: []models.Namespace{{}}, + AssociatedNamespaces: []models.Namespace{}, + }, + nil, + ). 
+ Once() }, expected: Expected{ - nil, - NewErrNamespaceInvalid(validator.ErrStructureInvalid), + ns: nil, + err: NewErrNamespaceLimitReached(1, nil), }, }, { description: "fails when a namespace already exists", - ownerID: "hash1", - namespace: requests.NamespaceCreate{ + req: &requests.NamespaceCreate{ + UserID: "000000000000000000000000", Name: "namespace", - TenantID: "xxxxx", + TenantID: "00000000-0000-4000-0000-000000000000", }, requiredMocks: func() { - user := &models.User{ - UserData: models.UserData{ - Name: "user1", - Username: "hash1", - }, - ID: "hash1", - } - - model := &models.Namespace{ - Name: strings.ToLower("namespace"), - Owner: "hash1", - Members: []models.Member{ - {ID: "hash1", Role: guard.RoleOwner}, - }, - Settings: &models.NamespaceSettings{ - SessionRecord: true, - }, - TenantID: "xxxxx", - } - - var isCloud bool - mock.On("UserGetByID", ctx, user.ID, false).Return(user, 0, nil).Once() - envMock.On("Get", "SHELLHUB_CLOUD").Return(strconv.FormatBool(isCloud)).Once() - mock.On("NamespaceGetByName", ctx, "namespace").Return(model, nil).Once() + storeMock. + On("UserGetInfo", ctx, "000000000000000000000000"). + Return( + &models.UserInfo{ + OwnedNamespaces: []models.Namespace{{}}, + AssociatedNamespaces: []models.Namespace{}, + }, + nil, + ). + Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(&models.User{ + ID: "000000000000000000000000", + MaxNamespaces: 3, + }, nil). + Once() + storeMock. + On("NamespaceConflicts", ctx, &models.NamespaceConflicts{Name: "namespace"}). + Return(nil, true, nil). 
+ Once() }, expected: Expected{ - nil, - NewErrNamespaceDuplicated(nil), + ns: nil, + err: NewErrNamespaceDuplicated(nil), }, }, { - description: "fails when store get namespace by name fails", - ownerID: "hash1", - namespace: requests.NamespaceCreate{ + description: "fails when store namespace create fails", + req: &requests.NamespaceCreate{ + UserID: "000000000000000000000000", Name: "namespace", - TenantID: "xxxxx", + TenantID: "00000000-0000-4000-0000-000000000000", }, requiredMocks: func() { - user := &models.User{ - UserData: models.UserData{ - Name: "user1", - Username: "hash1", - }, - ID: "hash1", - } - - model := &models.Namespace{ - Name: strings.ToLower("namespace"), - Owner: "hash1", - Members: []models.Member{ - {ID: "hash1", Role: guard.RoleOwner}, - }, - Settings: &models.NamespaceSettings{ - SessionRecord: true, - }, - TenantID: "xxxxx", - } - - var isCloud bool - mock.On("UserGetByID", ctx, user.ID, false).Return(user, 0, nil).Once() - envMock.On("Get", "SHELLHUB_CLOUD").Return(strconv.FormatBool(isCloud)).Once() - mock.On("NamespaceGetByName", ctx, "namespace").Return(model, errors.New("error")).Once() + storeMock. + On("UserGetInfo", ctx, "000000000000000000000000"). + Return( + &models.UserInfo{ + OwnedNamespaces: []models.Namespace{{}}, + AssociatedNamespaces: []models.Namespace{}, + }, + nil, + ). + Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(&models.User{ + ID: "000000000000000000000000", + MaxNamespaces: 3, + }, nil). + Once() + storeMock. + On("NamespaceConflicts", ctx, &models.NamespaceConflicts{Name: "namespace"}). + Return(nil, false, nil). + Once() + // envs.IsCommunity = true + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("false"). + Once() + envMock. + On("Get", "SHELLHUB_ENTERPRISE"). + Return("false"). + Once() + // -- + // envs.IsCloud = false + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("false"). + Twice() + // -- + storeMock. 
+ On( + "NamespaceCreate", + ctx, + &models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Type: models.TypeTeam, + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + AddedAt: now, + }, + }, + Settings: &models.NamespaceSettings{ + SessionRecord: true, + ConnectionAnnouncement: models.DefaultAnnouncementMessage, + }, + MaxDevices: -1, + }, + ). + Return("", errors.New("error")). + Once() }, expected: Expected{ - nil, - NewErrNamespaceNotFound("namespace", errors.New("error")), + ns: nil, + err: NewErrNamespaceCreateStore(errors.New("error")), }, }, { - description: "fails when store namespace create fails", - ownerID: "hash1", - namespace: requests.NamespaceCreate{ + description: "succeeds to create a namespace", + req: &requests.NamespaceCreate{ + UserID: "000000000000000000000000", Name: "namespace", - TenantID: "xxxxx", + TenantID: "00000000-0000-4000-0000-000000000000", }, requiredMocks: func() { - user := &models.User{ - UserData: models.UserData{ - Name: "user1", - Username: "hash1", - }, - ID: "hash1", - } - - var isCloud bool - notCloudNamespace := &models.Namespace{ - Name: strings.ToLower("namespace"), - Owner: "hash1", + storeMock. + On("UserGetInfo", ctx, "000000000000000000000000"). + Return( + &models.UserInfo{ + OwnedNamespaces: []models.Namespace{{}}, + AssociatedNamespaces: []models.Namespace{}, + }, + nil, + ). + Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(&models.User{ + ID: "000000000000000000000000", + MaxNamespaces: 3, + }, nil). + Once() + storeMock. + On("NamespaceConflicts", ctx, &models.NamespaceConflicts{Name: "namespace"}). + Return(nil, false, nil). + Once() + // envs.IsCommunity = true + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("false"). + Twice() + envMock. + On("Get", "SHELLHUB_ENTERPRISE"). + Return("false"). 
+ Once() + // -- + // envs.IsCloud = false + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("false"). + Once() + // -- + storeMock. + On( + "NamespaceCreate", + ctx, + &models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Type: models.TypeTeam, + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + AddedAt: now, + }, + }, + Settings: &models.NamespaceSettings{ + SessionRecord: true, + ConnectionAnnouncement: models.DefaultAnnouncementMessage, + }, + MaxDevices: -1, + }, + ). + Return("00000000-0000-4000-0000-000000000000", nil). + Once() + }, + expected: Expected{ + ns: &models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Type: models.TypeTeam, Members: []models.Member{ - {ID: "hash1", Role: guard.RoleOwner}, + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + AddedAt: now, + }, + }, + Settings: &models.NamespaceSettings{ + SessionRecord: true, + ConnectionAnnouncement: models.DefaultAnnouncementMessage, }, - Settings: &models.NamespaceSettings{SessionRecord: true}, - TenantID: "xxxxx", MaxDevices: -1, - } - mock.On("UserGetByID", ctx, user.ID, false).Return(user, 0, nil).Once() - mock.On("NamespaceGetByName", ctx, "namespace").Return(nil, nil).Once() - mock.On("NamespaceCreate", ctx, notCloudNamespace).Return(nil, errors.New("error")).Once() - envMock.On("Get", "SHELLHUB_CLOUD").Return(strconv.FormatBool(isCloud)).Once() - }, - expected: Expected{ - nil, NewErrNamespaceCreateStore(errors.New("error")), + }, + err: nil, }, }, { - description: "generates namespace with random tenant", - ownerID: "hash1", - namespace: requests.NamespaceCreate{ - Name: "namespace", + description: "succeeds to create a namespace-:-without tenant id", + req: &requests.NamespaceCreate{ + UserID: "000000000000000000000000", + Name: "namespace", + TenantID: "", }, 
requiredMocks: func() { - user := &models.User{ - UserData: models.UserData{ - Name: "user1", - Username: "hash1", - }, - ID: "hash1", - } - - var isCloud bool - notCloudNamespace := &models.Namespace{ - Name: strings.ToLower("namespace"), - Owner: "hash1", - Members: []models.Member{ - {ID: "hash1", Role: guard.RoleOwner}, - }, - Settings: &models.NamespaceSettings{SessionRecord: true}, - TenantID: "random_uuid", - MaxDevices: -1, - } - mock.On("UserGetByID", ctx, user.ID, false).Return(user, 0, nil).Once() - uuidMock.On("Generate").Return("random_uuid").Once() - mock.On("NamespaceGetByName", ctx, "namespace").Return(nil, nil).Once() - mock.On("NamespaceCreate", ctx, notCloudNamespace).Return(notCloudNamespace, nil).Once() - envMock.On("Get", "SHELLHUB_CLOUD").Return(strconv.FormatBool(isCloud)).Once() + storeMock. + On("UserGetInfo", ctx, "000000000000000000000000"). + Return( + &models.UserInfo{ + OwnedNamespaces: []models.Namespace{{}}, + AssociatedNamespaces: []models.Namespace{}, + }, + nil, + ). + Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(&models.User{ + ID: "000000000000000000000000", + MaxNamespaces: 3, + }, nil). + Once() + storeMock. + On("NamespaceConflicts", ctx, &models.NamespaceConflicts{Name: "namespace"}). + Return(nil, false, nil). + Once() + // envs.IsCommunity = true + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("false"). + Twice() + envMock. + On("Get", "SHELLHUB_ENTERPRISE"). + Return("false"). + Once() + // -- + // envs.IsCloud = false + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("false"). + Once() + // -- + uuidMock. + On("Generate"). + Return("4de9253f-4a2a-49e7-a748-26e7a009bd2e"). + Once() + storeMock. 
+ On( + "NamespaceCreate", + ctx, + &models.Namespace{ + TenantID: "4de9253f-4a2a-49e7-a748-26e7a009bd2e", + Name: "namespace", + Owner: "000000000000000000000000", + Type: models.TypeTeam, + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + AddedAt: now, + }, + }, + Settings: &models.NamespaceSettings{ + SessionRecord: true, + ConnectionAnnouncement: models.DefaultAnnouncementMessage, + }, + MaxDevices: -1, + }, + ). + Return("00000000-0000-4000-0000-000000000000", nil). + Once() }, expected: Expected{ - &models.Namespace{ - Name: strings.ToLower("namespace"), - Owner: "hash1", + ns: &models.Namespace{ + TenantID: "4de9253f-4a2a-49e7-a748-26e7a009bd2e", + Name: "namespace", + Owner: "000000000000000000000000", + Type: models.TypeTeam, Members: []models.Member{ - {ID: "hash1", Role: guard.RoleOwner}, + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + AddedAt: now, + }, + }, + Settings: &models.NamespaceSettings{ + SessionRecord: true, + ConnectionAnnouncement: models.DefaultAnnouncementMessage, }, - Settings: &models.NamespaceSettings{SessionRecord: true}, - TenantID: "random_uuid", MaxDevices: -1, - }, nil, + }, + err: nil, }, }, { - description: "checks the enterprise&community instance", - ownerID: "hash1", - namespace: requests.NamespaceCreate{ + description: "succeeds to create a namespace-:-env=cloud type team", + req: &requests.NamespaceCreate{ + UserID: "000000000000000000000000", Name: "namespace", - TenantID: "xxxxx", + TenantID: "00000000-0000-4000-0000-000000000000", + Type: "team", }, requiredMocks: func() { - user := &models.User{ - UserData: models.UserData{ - Name: "user1", - Username: "hash1", - }, - ID: "hash1", - } - - var isCloud bool - notCloudNamespace := &models.Namespace{ - Name: strings.ToLower("namespace"), - Owner: "hash1", - Members: []models.Member{ - {ID: "hash1", Role: guard.RoleOwner}, - }, - Settings: &models.NamespaceSettings{SessionRecord: true}, - TenantID: "xxxxx", 
- MaxDevices: -1, - } - mock.On("UserGetByID", ctx, user.ID, false).Return(user, 0, nil).Once() - mock.On("NamespaceGetByName", ctx, "namespace").Return(nil, nil).Once() - mock.On("NamespaceCreate", ctx, notCloudNamespace).Return(nil, nil).Once() - envMock.On("Get", "SHELLHUB_CLOUD").Return(strconv.FormatBool(isCloud)).Once() + storeMock. + On("UserGetInfo", ctx, "000000000000000000000000"). + Return( + &models.UserInfo{ + OwnedNamespaces: []models.Namespace{{}}, + AssociatedNamespaces: []models.Namespace{}, + }, + nil, + ). + Once() + // envs.IsCommunity = false + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(&models.User{ + ID: "000000000000000000000000", + MaxNamespaces: 3, + }, nil). + Once() + storeMock. + On("NamespaceConflicts", ctx, &models.NamespaceConflicts{Name: "namespace"}). + Return(nil, false, nil). + Once() + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("true"). + Twice() + envMock. + On("Get", "SHELLHUB_ENTERPRISE"). + Return("true"). + Once() + // -- + // envs.IsCloud = true + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("true"). + Once() + // -- + storeMock. + On( + "NamespaceCreate", + ctx, + &models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Type: models.TypeTeam, + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + AddedAt: now, + }, + }, + Settings: &models.NamespaceSettings{ + SessionRecord: true, + ConnectionAnnouncement: "", + }, + MaxDevices: -1, + }, + ). + Return("00000000-0000-4000-0000-000000000000", nil). 
+ Once() }, expected: Expected{ - &models.Namespace{ - Name: strings.ToLower("namespace"), - Owner: "hash1", + ns: &models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Type: models.TypeTeam, Members: []models.Member{ - {ID: "hash1", Role: guard.RoleOwner}, + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + AddedAt: now, + }, + }, + Settings: &models.NamespaceSettings{ + SessionRecord: true, + ConnectionAnnouncement: "", }, - Settings: &models.NamespaceSettings{SessionRecord: true}, - TenantID: "xxxxx", MaxDevices: -1, - }, nil, + }, + err: nil, }, }, { - description: "checks the cloud instance", - ownerID: "hash1", - namespace: requests.NamespaceCreate{ + description: "succeeds to create a namespace-:-env=cloud", + req: &requests.NamespaceCreate{ + UserID: "000000000000000000000000", Name: "namespace", - TenantID: "xxxxx", + TenantID: "00000000-0000-4000-0000-000000000000", + Type: "", }, requiredMocks: func() { - user := &models.User{ - UserData: models.UserData{ - Name: "user1", - Username: "hash1", - }, - ID: "hash1", - } - - isCloud := true - cloudNamespace := &models.Namespace{ - Name: strings.ToLower("namespace"), - Owner: "hash1", - Members: []models.Member{ - {ID: "hash1", Role: guard.RoleOwner}, - }, - Settings: &models.NamespaceSettings{SessionRecord: true}, - TenantID: "xxxxx", - MaxDevices: 3, - } - mock.On("UserGetByID", ctx, user.ID, false).Return(user, 0, nil).Once() - mock.On("NamespaceGetByName", ctx, "namespace").Return(nil, nil).Once() - mock.On("NamespaceCreate", ctx, cloudNamespace).Return(nil, nil).Once() - envMock.On("Get", "SHELLHUB_CLOUD").Return(strconv.FormatBool(isCloud)).Once() + storeMock. + On("UserGetInfo", ctx, "000000000000000000000000"). + Return( + &models.UserInfo{ + OwnedNamespaces: []models.Namespace{{}}, + AssociatedNamespaces: []models.Namespace{}, + }, + nil, + ). + Once() + // envs.IsCommunity = false + storeMock. 
+ On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(&models.User{ + ID: "000000000000000000000000", + MaxNamespaces: 3, + }, nil). + Once() + storeMock. + On("NamespaceConflicts", ctx, &models.NamespaceConflicts{Name: "namespace"}). + Return(nil, false, nil). + Once() + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("true"). + Once() + envMock. + On("Get", "SHELLHUB_ENTERPRISE"). + Return("true"). + Once() + // -- + // envs.IsCloud = true + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("true"). + Twice() + // -- + storeMock. + On( + "NamespaceCreate", + ctx, + &models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Type: models.TypeTeam, + Members: []models.Member{ + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + AddedAt: now, + }, + }, + Settings: &models.NamespaceSettings{ + SessionRecord: true, + ConnectionAnnouncement: "", + }, + MaxDevices: 3, + }, + ). + Return("00000000-0000-4000-0000-000000000000", nil). 
+ Once() }, expected: Expected{ - &models.Namespace{ - Name: strings.ToLower("namespace"), - Owner: "hash1", + ns: &models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "namespace", + Owner: "000000000000000000000000", + Type: models.TypeTeam, Members: []models.Member{ - {ID: "hash1", Role: guard.RoleOwner}, + { + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + AddedAt: now, + }, + }, + Settings: &models.NamespaceSettings{ + SessionRecord: true, + ConnectionAnnouncement: "", }, - Settings: &models.NamespaceSettings{SessionRecord: true}, - TenantID: "xxxxx", MaxDevices: 3, - }, nil, + }, + err: nil, }, }, } @@ -748,16 +937,18 @@ func TestCreateNamespace(t *testing.T) { t.Run(tc.description, func(t *testing.T) { tc.requiredMocks() - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) - returnedNamespace, err := service.CreateNamespace(ctx, tc.namespace, tc.ownerID) + service := NewService(store.Store(storeMock), privateKey, publicKey, storecache.NewNullCache(), clientMock) + returnedNamespace, err := service.CreateNamespace(ctx, tc.req) + assert.Equal(t, tc.expected, Expected{returnedNamespace, err}) + + storeMock.AssertExpectations(t) }) } - mock.AssertExpectations(t) } func TestEditNamespace(t *testing.T) { - mock := new(mocks.Store) + storeMock := new(storemock.Store) ctx := context.TODO() @@ -774,98 +965,125 @@ func TestEditNamespace(t *testing.T) { expected Expected }{ { - description: "fails when the name is invalid", + description: "fails when namespace does not exist", + tenantID: "xxxxx", + namespaceName: "newname", requiredMocks: func() { - namespace := &models.Namespace{ - Name: "oldname", - Owner: "hash1", - Members: []models.Member{ - {ID: "hash1", Role: guard.RoleOwner}, - }, - TenantID: "xxxxx", - } - - mock.On("NamespaceGet", ctx, namespace.TenantID).Return(namespace, nil).Once() + storeMock. 
+ On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "xxxxx"). + Return(nil, store.ErrNoDocuments). + Once() }, - tenantID: "xxxxx", - namespaceName: "name.with.dot", expected: Expected{ nil, - NewErrNamespaceInvalid(validator.ErrStructureInvalid), + NewErrNamespaceNotFound("xxxxx", store.ErrNoDocuments), }, }, { - description: "fails when the name is the same", + description: "fails when the store namespace update fails", + tenantID: "xxxxx", + namespaceName: "newname", requiredMocks: func() { namespace := &models.Namespace{ - Name: "oldname", - Owner: "hash1", - Members: []models.Member{ - {ID: "hash1", Role: guard.RoleOwner}, - }, TenantID: "xxxxx", + Name: "oldname", + Settings: &models.NamespaceSettings{SessionRecord: false}, } + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "xxxxx"). + Return(namespace, nil). + Once() - mock.On("NamespaceGet", ctx, namespace.TenantID).Return(namespace, nil).Once() + expectedNamespace := *namespace + expectedNamespace.Name = "newname" + storeMock. + On("NamespaceUpdate", ctx, &expectedNamespace). + Return(errors.New("error")). + Once() }, - tenantID: "xxxxx", - namespaceName: "oldname", expected: Expected{ nil, - NewErrNamespaceDuplicated(nil), + errors.New("error"), }, }, { - description: "fails when the store namespace rename fails", + description: "succeeds changing the name to lowercase", + namespaceName: "newName", + tenantID: "xxxxx", requiredMocks: func() { namespace := &models.Namespace{ - Name: "oldname", - Owner: "hash1", - Members: []models.Member{ - {ID: "hash1", Role: guard.RoleOwner}, - }, TenantID: "xxxxx", + Name: "oldname", + Settings: &models.NamespaceSettings{SessionRecord: false}, + } + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "xxxxx"). + Return(namespace, nil). + Once() + + expectedNamespace := *namespace + expectedNamespace.Name = "newname" + storeMock. + On("NamespaceUpdate", ctx, &expectedNamespace). + Return(nil). 
+ Once() + + finalNamespace := &models.Namespace{ + TenantID: "xxxxx", + Name: "newname", + Settings: &models.NamespaceSettings{SessionRecord: false}, } - - newName := "newname" - mock.On("NamespaceGet", ctx, namespace.TenantID).Return(namespace, nil).Once() - mock.On("NamespaceRename", ctx, namespace.TenantID, newName).Return(nil, errors.New("error")).Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "xxxxx"). + Return(finalNamespace, nil). + Once() }, - namespaceName: "newname", - tenantID: "xxxxx", expected: Expected{ + &models.Namespace{ + TenantID: "xxxxx", + Name: "newname", + Settings: &models.NamespaceSettings{SessionRecord: false}, + }, nil, - errors.New("error"), }, }, { - description: "succeeds", + description: "succeeds", + namespaceName: "newname", + tenantID: "xxxxx", requiredMocks: func() { namespace := &models.Namespace{ - Name: "oldname", - Owner: "hash1", - Members: []models.Member{ - {ID: "hash1", Role: guard.RoleOwner}, - }, TenantID: "xxxxx", - } - - newName := "newname" - newNamespace := &models.Namespace{ - Name: newName, - Owner: "hash1", + Name: "oldname", + Settings: &models.NamespaceSettings{SessionRecord: false}, + } + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "xxxxx"). + Return(namespace, nil). + Once() + + expectedNamespace := *namespace + expectedNamespace.Name = "newname" + storeMock. + On("NamespaceUpdate", ctx, &expectedNamespace). + Return(nil). + Once() + + finalNamespace := &models.Namespace{ TenantID: "xxxxx", + Name: "newname", + Settings: &models.NamespaceSettings{SessionRecord: false}, } - mock.On("NamespaceGet", ctx, namespace.TenantID).Return(namespace, nil).Once() - mock.On("NamespaceRename", ctx, namespace.TenantID, newName).Return(newNamespace, nil).Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "xxxxx"). + Return(finalNamespace, nil). 
+ Once() }, - namespaceName: "newname", - tenantID: "xxxxx", expected: Expected{ &models.Namespace{ - Name: "newname", - Owner: "hash1", TenantID: "xxxxx", + Name: "newname", + Settings: &models.NamespaceSettings{SessionRecord: false}, }, nil, }, @@ -875,1212 +1093,270 @@ func TestEditNamespace(t *testing.T) { t.Run(tc.description, func(t *testing.T) { tc.requiredMocks() - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) - returnedNamespace, err := service.EditNamespace(ctx, tc.tenantID, tc.namespaceName) - assert.Equal(t, tc.expected, Expected{returnedNamespace, err}) + service := NewService(store.Store(storeMock), privateKey, publicKey, storecache.NewNullCache(), clientMock) + + req := &requests.NamespaceEdit{ + TenantParam: requests.TenantParam{Tenant: tc.tenantID}, + Name: tc.namespaceName, + } + namespace, err := service.EditNamespace(ctx, req) + + assert.Equal(t, tc.expected, Expected{namespace, err}) }) } - mock.AssertExpectations(t) + storeMock.AssertExpectations(t) } -func TestDeleteNamespace(t *testing.T) { - mock := new(mocks.Store) - - ctx := context.TODO() +func TestEditSessionRecord(t *testing.T) { + storeMock := new(storemock.Store) cases := []struct { - description string - namespace *models.Namespace - requiredMocks func(namespace *models.Namespace) + name string + sessionRecord bool + tenantID string + mocks func(context.Context) expected error }{ { - description: "fails when namespace does not exist", - namespace: &models.Namespace{Name: "oldname", Owner: "ID1", TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", Members: []models.Member{{ID: "user1", Role: guard.RoleOwner}}}, - requiredMocks: func(namespace *models.Namespace) { - mock.On("NamespaceGet", ctx, namespace.TenantID).Return(nil, errors.New("error")).Once() + name: "fails when namespace not found", + sessionRecord: true, + tenantID: "xxxx", + mocks: func(ctx context.Context) { + storeMock. 
+ On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "xxxx"). + Return(nil, store.ErrNoDocuments). + Once() }, - expected: NewErrNamespaceNotFound("a736a52b-5777-4f92-b0b8-e359bf484713", errors.New("error")), + expected: NewErrNamespaceNotFound("xxxx", store.ErrNoDocuments), }, { - description: "fails when store delete fails", - namespace: &models.Namespace{Name: "oldname", Owner: "ID1", TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", Members: []models.Member{{ID: "user1", Role: guard.RoleOwner}}}, - requiredMocks: func(namespace *models.Namespace) { - mock.On("NamespaceGet", ctx, namespace.TenantID).Return(namespace, nil).Once() - envMock.On("Get", "SHELLHUB_CLOUD").Return("false").Once() - envMock.On("Get", "SHELLHUB_BILLING").Return("false").Once() - mock.On("NamespaceDelete", ctx, namespace.TenantID).Return(errors.New("error")).Once() + name: "fails when namespace update fails", + sessionRecord: true, + tenantID: "xxxx", + mocks: func(ctx context.Context) { + namespace := &models.Namespace{ + TenantID: "xxxx", + Settings: &models.NamespaceSettings{SessionRecord: false}, + } + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "xxxx"). + Return(namespace, nil). + Once() + + expectedNamespace := *namespace + expectedNamespace.Settings.SessionRecord = true + storeMock. + On("NamespaceUpdate", ctx, &expectedNamespace). + Return(errors.New("error")). 
+ Once() }, expected: errors.New("error"), }, { - description: "succeeds", - namespace: &models.Namespace{Name: "oldname", Owner: "ID1", TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", Members: []models.Member{{ID: "user1", Role: guard.RoleOwner}}}, - requiredMocks: func(namespace *models.Namespace) { - mock.On("NamespaceGet", ctx, namespace.TenantID).Return(namespace, nil).Once() - envMock.On("Get", "SHELLHUB_CLOUD").Return("false").Once() - envMock.On("Get", "SHELLHUB_BILLING").Return("false").Once() - mock.On("NamespaceDelete", ctx, namespace.TenantID).Return(nil).Once() + name: "succeeds", + sessionRecord: true, + tenantID: "xxxx", + mocks: func(ctx context.Context) { + namespace := &models.Namespace{ + TenantID: "xxxx", + Settings: &models.NamespaceSettings{SessionRecord: false}, + } + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "xxxx"). + Return(namespace, nil). + Once() + + expectedNamespace := *namespace + expectedNamespace.Settings.SessionRecord = true + storeMock. + On("NamespaceUpdate", ctx, &expectedNamespace). + Return(nil). + Once() }, expected: nil, }, { - description: "reports delete", - namespace: &models.Namespace{Name: "oldname", Owner: "ID1", TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", Members: []models.Member{{ID: "user1", Role: guard.RoleOwner}}}, - requiredMocks: func(namespace *models.Namespace) { - user1 := &models.User{ - UserData: models.UserData{ - Name: "user1", - Username: "user1", - Email: "user1@email.com", - }, - ID: "ID1", + name: "succeeds when settings is nil", + sessionRecord: true, + tenantID: "xxxx", + mocks: func(ctx context.Context) { + namespace := &models.Namespace{ + TenantID: "xxxx", + Settings: &models.NamespaceSettings{}, } + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "xxxx"). + Return(namespace, nil). 
+ Once() - ns := &models.Namespace{ - TenantID: namespace.TenantID, - Owner: user1.ID, - Members: []models.Member{ - {ID: user1.ID, Role: guard.RoleOwner}, - }, - Billing: &models.Billing{ - Active: true, - }, - MaxDevices: -1, - } - mock.On("NamespaceGet", ctx, namespace.TenantID).Return(ns, nil).Once() - envMock.On("Get", "SHELLHUB_CLOUD").Return(strconv.FormatBool(true)).Once() - envMock.On("Get", "SHELLHUB_BILLING").Return(strconv.FormatBool(true)).Once() - clientMock.On("ReportDelete", ns).Return(200, nil).Once() - mock.On("NamespaceDelete", ctx, namespace.TenantID).Return(nil).Once() + expectedNamespace := *namespace + expectedNamespace.Settings = &models.NamespaceSettings{SessionRecord: true} + storeMock. + On("NamespaceUpdate", ctx, &expectedNamespace). + Return(nil). + Once() }, expected: nil, }, } - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - tc.requiredMocks(tc.namespace) + s := NewService(store.Store(storeMock), privateKey, publicKey, storecache.NewNullCache(), clientMock) - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) - err := service.DeleteNamespace(ctx, tc.namespace.TenantID) + for _, tc := range cases { + t.Run(tc.name, func(t *testing.T) { + ctx := context.Background() + tc.mocks(ctx) + err := s.EditSessionRecordStatus(ctx, tc.sessionRecord, tc.tenantID) assert.Equal(t, tc.expected, err) }) } - mock.AssertExpectations(t) + storeMock.AssertExpectations(t) } -func TestAddNamespaceUser(t *testing.T) { - mock := new(mocks.Store) +func TestDeleteNamespace(t *testing.T) { + storeMock := new(storemock.Store) ctx := context.TODO() - type Expected struct { - namespace *models.Namespace - err error - } - cases := []struct { description string - TenantID string - Username string - Role string - ID string - RequiredMocks func() - Expected Expected + tenantID string + requiredMocks func() + expected error }{ { - description: "fails when MemberID is not valid", - 
Username: "", - Role: guard.RoleObserver, - ID: "ID1", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - RequiredMocks: func() { - }, - Expected: Expected{ - namespace: nil, - err: NewErrNamespaceMemberInvalid(validator.ErrStructureInvalid), - }, - }, - { - description: "fails when Role is not valid", - Username: "user2", - Role: "invalid", - ID: "ID1", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - RequiredMocks: func() { - }, - Expected: Expected{ - namespace: nil, - err: NewErrNamespaceMemberInvalid(validator.ErrStructureInvalid), - }, - }, - { - description: "fails when the namespace was not found", - Username: "user2", - Role: guard.RoleObserver, - ID: "ID1", - TenantID: "tenantIDNotFound", - RequiredMocks: func() { - mock.On("NamespaceGet", ctx, "tenantIDNotFound").Return(nil, ErrNamespaceNotFound).Once() - }, - Expected: Expected{ - namespace: nil, - err: NewErrNamespaceNotFound("tenantIDNotFound", ErrNamespaceNotFound), - }, - }, - { - description: "fails when the active member was not found", - Username: "user1", - Role: guard.RoleObserver, - ID: "userIDNotFound", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - RequiredMocks: func() { - namespace := &models.Namespace{ - Name: "group1", - Owner: "ID1", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - Members: []models.Member{ - {ID: "ID1", Role: guard.RoleOwner}, - }, - } - - mock.On("NamespaceGet", ctx, namespace.TenantID).Return(namespace, nil).Once() - - mock.On("UserGetByID", ctx, "userIDNotFound", false).Return(nil, 0, ErrUserNotFound).Once() - }, - Expected: Expected{ - namespace: nil, - err: NewErrUserNotFound("userIDNotFound", ErrUserNotFound), - }, - }, - { - description: "fails when the active member is not on the namespace", - Username: "user1", - Role: guard.RoleObserver, - ID: "ID2", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - RequiredMocks: func() { - namespace := &models.Namespace{ - Name: "group1", - Owner: "ID1", - TenantID: 
"a736a52b-5777-4f92-b0b8-e359bf484713", - Members: []models.Member{ - {ID: "ID1", Role: guard.RoleOwner}, - }, - } - - user2 := &models.User{ - UserData: models.UserData{ - Name: "user2", - Username: "user2", - Email: "user2@email.com", - }, - ID: "ID2", - } - - mock.On("NamespaceGet", ctx, namespace.TenantID).Return(namespace, nil).Once() - - mock.On("UserGetByID", ctx, user2.ID, false).Return(user2, 0, nil).Once() - }, - Expected: Expected{ - namespace: nil, - err: NewErrNamespaceMemberNotFound("ID2", nil), - }, - }, - { - description: "addNamespaceUser fails when passive member was not found", - Username: "usernamespacenotfound", - Role: guard.RoleObserver, - ID: "ID1", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - RequiredMocks: func() { - namespace := &models.Namespace{ - Name: "group1", - Owner: "ID1", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - Members: []models.Member{ - {ID: "ID1", Role: guard.RoleOwner}, - }, - } - - user1 := &models.User{ - UserData: models.UserData{ - Name: "user1", - Username: "user1", - Email: "user1@email.com", - }, - ID: "ID1", - } - - mock.On("NamespaceGet", ctx, namespace.TenantID).Return(namespace, nil).Once() - - mock.On("UserGetByID", ctx, user1.ID, false).Return(user1, 0, nil).Once() - mock.On("UserGetByUsername", ctx, "usernamespacenotfound").Return(nil, ErrBadRequest).Once() - }, - Expected: Expected{ - namespace: nil, - err: NewErrUserNotFound("usernamespacenotfound", ErrBadRequest), + description: "fails when namespace does not exist", + tenantID: "00000000-0000-4000-0000-000000000000", + requiredMocks: func() { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(nil, errors.New("error")). 
+ Once() }, + expected: NewErrNamespaceNotFound("00000000-0000-4000-0000-000000000000", errors.New("error")), }, { - description: "fails when the passive member is on the namespace", - Username: "user2", - Role: guard.RoleObserver, - ID: "ID1", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484714", - RequiredMocks: func() { - user2 := &models.User{ - UserData: models.UserData{ - Name: "user2", - Username: "user2", - Email: "user2@email.com", - }, - ID: "ID2", - } - - user1 := &models.User{ - UserData: models.UserData{ - Name: "user1", - Username: "user1", - Email: "user1@email.com", - }, - ID: "ID1", - } - - namespaceTwoMembers := &models.Namespace{ - Name: "group1", - Owner: "ID1", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484714", - Members: []models.Member{ - {ID: "ID1", Role: guard.RoleOwner}, - {ID: "ID2", Role: guard.RoleObserver}, - }, - } - - mock.On("NamespaceGet", ctx, namespaceTwoMembers.TenantID).Return(namespaceTwoMembers, nil).Once() - - mock.On("UserGetByID", ctx, user1.ID, false).Return(user1, 0, nil).Once() - mock.On("UserGetByUsername", ctx, user2.Username).Return(user2, nil).Once() - }, - Expected: Expected{ - namespace: nil, - err: NewErrNamespaceMemberDuplicated("ID2", nil), + description: "fails when store delete fails", + tenantID: "00000000-0000-4000-0000-000000000000", + requiredMocks: func() { + namespace := &models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000"} + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(namespace, nil). + Once() + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("false"). + Once() + storeMock. + On("NamespaceDelete", ctx, namespace). + Return(errors.New("error")). 
+ Once() }, + expected: errors.New("error"), }, { description: "succeeds", - Username: "user2", - Role: guard.RoleObserver, - ID: "ID1", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - RequiredMocks: func() { - namespace := &models.Namespace{ - Name: "group1", - Owner: "ID1", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - Members: []models.Member{ - {ID: "ID1", Role: guard.RoleOwner}, - }, - } - - user1 := &models.User{ - UserData: models.UserData{ - Name: "user1", - Username: "user1", - Email: "user1@email.com", - }, - ID: "ID1", - } - - user2 := &models.User{ - UserData: models.UserData{ - Name: "user2", - Username: "user2", - Email: "user2@email.com", - }, - ID: "ID2", - } - - namespaceTwoMembers := &models.Namespace{ - Name: "group1", - Owner: "ID1", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484714", - Members: []models.Member{ - {ID: "ID1", Role: guard.RoleOwner}, - {ID: "ID2", Role: guard.RoleObserver}, - }, - } - - mock.On("NamespaceGet", ctx, namespace.TenantID).Return(namespace, nil).Once() - - mock.On("UserGetByID", ctx, user1.ID, false).Return(user1, 0, nil).Once() - mock.On("UserGetByUsername", ctx, user2.Username).Return(user2, nil).Once() - - mock.On("NamespaceAddMember", ctx, namespace.TenantID, user2.ID, guard.RoleObserver).Return(namespaceTwoMembers, nil).Once() - }, - Expected: Expected{ - namespace: &models.Namespace{Name: "group1", Owner: "ID1", TenantID: "a736a52b-5777-4f92-b0b8-e359bf484714", Members: []models.Member{{ID: "ID1", Role: guard.RoleOwner}, {ID: "ID2", Role: guard.RoleObserver}}}, - err: nil, + tenantID: "00000000-0000-4000-0000-000000000000", + requiredMocks: func() { + namespace := &models.Namespace{TenantID: "00000000-0000-4000-0000-000000000000"} + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "00000000-0000-4000-0000-000000000000"). + Return(namespace, nil). + Once() + envMock. + On("Get", "SHELLHUB_CLOUD"). + Return("false"). + Once() + storeMock. 
+ On("NamespaceDelete", ctx, namespace). + Return(nil). + Once() }, + expected: nil, }, } + s := NewService(storeMock, privateKey, publicKey, storecache.NewNullCache(), clientMock) + for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - tc.RequiredMocks() + tc.requiredMocks() - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) - ns, err := service.AddNamespaceUser(ctx, tc.Username, tc.Role, tc.TenantID, tc.ID) - assert.Equal(t, tc.Expected, Expected{ns, err}) + err := s.DeleteNamespace(ctx, tc.tenantID) + assert.Equal(t, tc.expected, err) }) } - mock.AssertExpectations(t) + storeMock.AssertExpectations(t) } -func TestRemoveNamespaceUser(t *testing.T) { - mock := new(mocks.Store) - - ctx := context.TODO() +func TestGetSessionRecord(t *testing.T) { + storeMock := new(storemock.Store) type Expected struct { - namespace *models.Namespace - err error + status bool + err error } cases := []struct { - description string - RequiredMocks func() - TenantID string - UserID string - MemberID string - Expected Expected + name string + tenantID string + mocks func(context.Context) + expected Expected }{ { - description: "fails when namespace was not found", - RequiredMocks: func() { - mock.On("NamespaceGet", ctx, "tenantIDNotFound").Return(nil, errors.New("error")).Once() - }, - TenantID: "tenantIDNotFound", - MemberID: "hash1", - UserID: "hash1", - Expected: Expected{ - namespace: nil, - err: NewErrNamespaceNotFound("tenantIDNotFound", errors.New("error")), - }, - }, - { - description: "fails when active user was not found", - RequiredMocks: func() { - namespace := &models.Namespace{ - Name: "group1", - Owner: "hash1", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - Members: []models.Member{ - {ID: "hash1", Role: guard.RoleOwner}, - }, - } - - mock.On("NamespaceGet", ctx, namespace.TenantID).Return(namespace, nil).Once() - mock.On("UserGetByID", ctx, "invalidUserID", false).Return(nil, 0, 
ErrUserNotFound).Once() - }, - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - MemberID: "hash2", - UserID: "invalidUserID", - Expected: Expected{ - namespace: nil, - err: NewErrUserNotFound("invalidUserID", ErrUserNotFound), + name: "fails when namespace not found", + tenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", + mocks: func(ctx context.Context) { + storeMock.On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "a736a52b-5777-4f92-b0b8-e359bf484713").Return(nil, store.ErrNoDocuments).Once() }, + expected: Expected{false, NewErrNamespaceNotFound("a736a52b-5777-4f92-b0b8-e359bf484713", store.ErrNoDocuments)}, }, { - description: "fails when passive user was not found", - RequiredMocks: func() { - namespace := &models.Namespace{ - Name: "group1", - Owner: "hash1", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - Members: []models.Member{ - {ID: "hash1", Role: guard.RoleOwner}, - }, - } - - user := &models.User{ - UserData: models.UserData{ - Name: "user1", - Username: "username1", - }, - ID: "hash1", - } - - mock.On("NamespaceGet", ctx, namespace.TenantID).Return(namespace, nil).Once() - mock.On("UserGetByID", ctx, namespace.Owner, false).Return(user, 0, nil).Once() - mock.On("UserGetByID", ctx, "invalidPassiveMemberID", false).Return(nil, 0, ErrUserNotFound).Once() - }, - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - MemberID: "invalidPassiveMemberID", - UserID: "hash1", - Expected: Expected{ - namespace: nil, - err: NewErrUserNotFound("invalidPassiveMemberID", ErrUserNotFound), + name: "fails when store namespace resolve fails", + tenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", + mocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "a736a52b-5777-4f92-b0b8-e359bf484713"). + Return(nil, errors.New("error")). 
+ Once() }, + expected: Expected{false, NewErrNamespaceNotFound("a736a52b-5777-4f92-b0b8-e359bf484713", errors.New("error"))}, }, { - description: "fails when user is not a namespace's member", - RequiredMocks: func() { - namespace := &models.Namespace{ - Name: "group1", - Owner: "hash1", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - Members: []models.Member{ - {ID: "hash1", Role: guard.RoleOwner}, - }, - } - - user := &models.User{ - UserData: models.UserData{ - Name: "user1", - Username: "username1", - }, - ID: "hash1", - } - - user2 := &models.User{ - UserData: models.UserData{ - Name: "user2", - Username: "username2", - }, - ID: "hash2", - } - - mock.On("NamespaceGet", ctx, namespace.TenantID).Return(namespace, nil).Once() - mock.On("UserGetByID", ctx, namespace.Owner, false).Return(user, 0, nil).Once() - mock.On("UserGetByID", ctx, user2.ID, false).Return(user2, 0, nil).Once() - }, - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - MemberID: "hash2", - UserID: "hash1", - Expected: Expected{ - namespace: nil, - err: NewErrNamespaceMemberNotFound("hash2", nil), + name: "succeeds", + tenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", + mocks: func(ctx context.Context) { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "a736a52b-5777-4f92-b0b8-e359bf484713"). + Return( + &models.Namespace{ + Name: "group1", + Owner: "hash1", + TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", + Settings: &models.NamespaceSettings{SessionRecord: false}, + }, + nil, + ). 
+ Once() }, + expected: Expected{false, nil}, }, - { - description: "fails when user can not act over the role", - RequiredMocks: func() { - user2 := &models.User{ - UserData: models.UserData{ - Name: "user2", - Username: "username2", - }, - ID: "hash2", - } - - user3 := &models.User{ - UserData: models.UserData{ - Name: "user3", - Username: "username3", - }, - ID: "hash3", - } + } - namespaceThreeMembers := &models.Namespace{ - Name: "group2", - Owner: "hash1", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484714", - Members: []models.Member{ - { - ID: "hash1", Role: guard.RoleOwner, - }, - { - ID: "hash2", Role: guard.RoleAdministrator, - }, - { - ID: "hash3", Role: guard.RoleAdministrator, - }, - }, - } - - mock.On("NamespaceGet", ctx, namespaceThreeMembers.TenantID).Return(namespaceThreeMembers, nil).Once() - mock.On("UserGetByID", ctx, user2.ID, false).Return(user2, 0, nil).Once() - mock.On("UserGetByID", ctx, user3.ID, false).Return(user3, 0, nil).Once() - }, - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484714", - MemberID: "hash3", - UserID: "hash2", - Expected: Expected{ - namespace: nil, - err: guard.ErrForbidden, - }, - }, - { - description: "when NamespaceRemoveMember store's function fails", - RequiredMocks: func() { - namespace := &models.Namespace{ - Name: "group1", - Owner: "hash1", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - Members: []models.Member{ - {ID: "hash1", Role: guard.RoleOwner}, - }, - } - - user := &models.User{ - UserData: models.UserData{ - Name: "user1", - Username: "username1", - }, - ID: "hash1", - } - - user2 := &models.User{ - UserData: models.UserData{ - Name: "user2", - Username: "username2", - }, - ID: "hash2", - } - - namespaceTwoMembers := &models.Namespace{ - Name: "group2", - Owner: "hash1", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484714", - Members: []models.Member{ - {ID: "hash1", Role: guard.RoleOwner}, - {ID: "hash2", Role: guard.RoleObserver}, - }, - } - - mock.On("NamespaceGet", ctx, 
namespaceTwoMembers.TenantID).Return(namespaceTwoMembers, nil).Once() - mock.On("UserGetByID", ctx, namespace.Owner, false).Return(user, 0, nil).Once() - mock.On("UserGetByID", ctx, user2.ID, false).Return(user2, 0, nil).Once() - - mock.On("NamespaceRemoveMember", ctx, namespaceTwoMembers.TenantID, user2.ID).Return(nil, errors.New("error")).Once() - }, - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484714", - MemberID: "hash2", - UserID: "hash1", - Expected: Expected{ - namespace: nil, - err: errors.New("error"), - }, - }, - { - description: "succeeds", - RequiredMocks: func() { - namespace := &models.Namespace{ - Name: "group1", - Owner: "hash1", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - Members: []models.Member{ - {ID: "hash1", Role: guard.RoleOwner}, - }, - } - - user := &models.User{ - UserData: models.UserData{ - Name: "user1", - Username: "username1", - }, - ID: "hash1", - } - - user2 := &models.User{ - UserData: models.UserData{ - Name: "user2", - Username: "username2", - }, - ID: "hash2", - } - - namespaceTwoMembers := &models.Namespace{ - Name: "group2", - Owner: "hash1", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484714", - Members: []models.Member{ - {ID: "hash1", Role: guard.RoleOwner}, - {ID: "hash2", Role: guard.RoleObserver}, - }, - } - - mock.On("NamespaceGet", ctx, namespaceTwoMembers.TenantID).Return(namespaceTwoMembers, nil).Once() - mock.On("UserGetByID", ctx, namespace.Owner, false).Return(user, 0, nil).Once() - mock.On("UserGetByID", ctx, user2.ID, false).Return(user2, 0, nil).Once() - - mock.On("NamespaceRemoveMember", ctx, namespaceTwoMembers.TenantID, user2.ID).Return(namespace, nil).Once() - }, - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484714", - MemberID: "hash2", - UserID: "hash1", - Expected: Expected{ - namespace: &models.Namespace{Name: "group1", Owner: "hash1", TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", Members: []models.Member{{ID: "hash1", Role: guard.RoleOwner}}}, - err: nil, - }, - }, - } + s := 
NewService(store.Store(storeMock), privateKey, publicKey, storecache.NewNullCache(), clientMock) for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - tc.RequiredMocks() - - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) - ns, err := service.RemoveNamespaceUser(ctx, tc.TenantID, tc.MemberID, tc.UserID) - assert.Equal(t, tc.Expected, Expected{ns, err}) - }) - } - - mock.AssertExpectations(t) -} - -func TestEditNamespaceUser(t *testing.T) { - mock := new(mocks.Store) - - ctx := context.TODO() - - cases := []struct { - description string - TenantID string - UserID string - MemberID string - MemberNewRole string - RequiredMocks func() - Expected error - }{ - { - description: "fails when namespace was not found", - TenantID: "tenantIDNotFound", - UserID: "activeMemberID", - MemberID: "passiveMemberID", - MemberNewRole: guard.RoleObserver, - RequiredMocks: func() { - mock.On("NamespaceGet", ctx, "tenantIDNotFound").Return(nil, errors.New("error")).Once() - }, - Expected: NewErrNamespaceNotFound("tenantIDNotFound", errors.New("error")), - }, - { - description: "fails when active member was not found", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484717", - UserID: "invalidMemberActiveID", - MemberID: "passiveMemberID", - MemberNewRole: guard.RoleObserver, - RequiredMocks: func() { - namespaceActivePassive := &models.Namespace{ - Name: "group1", - Owner: "activeMemberID", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484717", - Members: []models.Member{ - {ID: "ownerID", Role: guard.RoleOwner}, - {ID: "activeMemberID", Role: guard.RoleAdministrator}, - {ID: "passiveMemberID", Role: guard.RoleObserver}, - }, - } - - mock.On("NamespaceGet", ctx, namespaceActivePassive.TenantID).Return(namespaceActivePassive, nil).Once() - - mock.On("UserGetByID", ctx, "invalidMemberActiveID", false).Return(nil, 0, errors.New("error")).Once() - }, - Expected: NewErrUserNotFound("invalidMemberActiveID", 
errors.New("error")), - }, - { - description: "fails when passive member was not found", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - UserID: "activeMemberID", - MemberID: "invalidMemberPassiveID", - MemberNewRole: guard.RoleObserver, - RequiredMocks: func() { - namespaceActiveOwner := &models.Namespace{ - Name: "group1", - Owner: "activeMemberID", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - Members: []models.Member{ - {ID: "activeMemberID", Role: guard.RoleOwner}, - }, - } - - activeMember := &models.User{ - UserData: models.UserData{ - Name: "activeMemberName", - Username: "activeMemberUsername", - }, - ID: "activeMemberID", - } - - mock.On("NamespaceGet", ctx, namespaceActiveOwner.TenantID).Return(namespaceActiveOwner, nil).Once() - - mock.On("UserGetByID", ctx, activeMember.ID, false).Return(activeMember, 0, nil).Once() - mock.On("UserGetByID", ctx, "invalidMemberPassiveID", false).Return(nil, 0, errors.New("error")).Once() - }, - Expected: NewErrUserNotFound("invalidMemberPassiveID", errors.New("error")), - }, - { - description: "fails when could not find passive member inside namespace", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - UserID: "activeMemberID", - MemberID: "passiveMemberID", - MemberNewRole: guard.RoleObserver, - RequiredMocks: func() { - namespaceActiveOwner := &models.Namespace{ - Name: "group1", - Owner: "activeMemberID", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - Members: []models.Member{ - {ID: "activeMemberID", Role: guard.RoleOwner}, - }, - } - - activeMember := &models.User{ - UserData: models.UserData{ - Name: "activeMemberName", - Username: "activeMemberUsername", - }, - ID: "activeMemberID", - } - - passiveMember := &models.User{ - UserData: models.UserData{ - Name: "passiveMemberName", - Username: "passiveMemberUsername", - }, - ID: "passiveMemberID", - } - - mock.On("NamespaceGet", ctx, namespaceActiveOwner.TenantID).Return(namespaceActiveOwner, nil).Once() - - mock.On("UserGetByID", ctx, 
activeMember.ID, false).Return(activeMember, 0, nil).Once() - mock.On("UserGetByID", ctx, passiveMember.ID, false).Return(passiveMember, 0, nil).Once() - }, - Expected: NewErrNamespaceMemberNotFound("passiveMemberID", nil), - }, - { - description: "fails when could not find passive member inside namespace", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - UserID: "activeMemberID", - MemberID: "passiveMemberID", - MemberNewRole: guard.RoleObserver, - RequiredMocks: func() { - namespaceActiveOwner := &models.Namespace{ - Name: "group1", - Owner: "activeMemberID", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - Members: []models.Member{ - {ID: "activeMemberID", Role: guard.RoleOwner}, - }, - } - - activeMember := &models.User{ - UserData: models.UserData{ - Name: "activeMemberName", - Username: "activeMemberUsername", - }, - ID: "activeMemberID", - } - - passiveMember := &models.User{ - UserData: models.UserData{ - Name: "passiveMemberName", - Username: "passiveMemberUsername", - }, - ID: "passiveMemberID", - } - - mock.On("NamespaceGet", ctx, namespaceActiveOwner.TenantID).Return(namespaceActiveOwner, nil).Once() - - mock.On("UserGetByID", ctx, activeMember.ID, false).Return(activeMember, 0, nil).Once() - mock.On("UserGetByID", ctx, passiveMember.ID, false).Return(passiveMember, 0, nil).Once() - }, - Expected: NewErrNamespaceMemberNotFound("passiveMemberID", nil), - }, - { - description: "fails when could not find active member inside namespace", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484714", - UserID: "activeMemberID", - MemberID: "passiveMemberID", - MemberNewRole: guard.RoleOperator, - RequiredMocks: func() { - activeMember := &models.User{ - UserData: models.UserData{ - Name: "activeMemberName", - Username: "activeMemberUsername", - }, - ID: "activeMemberID", - } - - passiveMember := &models.User{ - UserData: models.UserData{ - Name: "passiveMemberName", - Username: "passiveMemberUsername", - }, - ID: "passiveMemberID", - } - - 
namespacePassiveObserver := &models.Namespace{ - Name: "group1", - Owner: "activeMemberID", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484714", - Members: []models.Member{ - {ID: "memberID", Role: guard.RoleOwner}, - {ID: "passiveMemberID", Role: guard.RoleObserver}, - }, - } - - mock.On("NamespaceGet", ctx, namespacePassiveObserver.TenantID).Return(namespacePassiveObserver, nil).Once() - - mock.On("UserGetByID", ctx, activeMember.ID, false).Return(activeMember, 0, nil).Once() - mock.On("UserGetByID", ctx, passiveMember.ID, false).Return(passiveMember, 0, nil).Once() - }, - Expected: NewErrNamespaceMemberNotFound("activeMemberID", nil), - }, - { - description: "fails when active and passive roles are the same", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484715", - UserID: "activeMemberID", - MemberID: "passiveMemberID", - MemberNewRole: guard.RoleOperator, - RequiredMocks: func() { - activeMember := &models.User{ - UserData: models.UserData{ - Name: "activeMemberName", - Username: "activeMemberUsername", - }, - ID: "activeMemberID", - } - - passiveMember := &models.User{ - UserData: models.UserData{ - Name: "passiveMemberName", - Username: "passiveMemberUsername", - }, - ID: "passiveMemberID", - } - - namespaceActivePassiveSame := &models.Namespace{ - Name: "group1", - Owner: "activeMemberID", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484715", - Members: []models.Member{ - {ID: "ownerID", Role: guard.RoleOwner}, - {ID: "activeMemberID", Role: guard.RoleAdministrator}, - {ID: "passiveMemberID", Role: guard.RoleAdministrator}, - }, - } - - mock.On("NamespaceGet", ctx, namespaceActivePassiveSame.TenantID).Return(namespaceActivePassiveSame, nil).Once() - - mock.On("UserGetByID", ctx, passiveMember.ID, false).Return(passiveMember, 0, nil).Once() - mock.On("UserGetByID", ctx, activeMember.ID, false).Return(activeMember, 0, nil).Once() - }, - Expected: guard.ErrForbidden, - }, - { - description: "fails when user can not act over the role", - TenantID: 
"a736a52b-5777-4f92-b0b8-e359bf484716", - UserID: "activeMemberID", - MemberID: "passiveMemberID", - MemberNewRole: guard.RoleAdministrator, - RequiredMocks: func() { - activeMember := &models.User{ - UserData: models.UserData{ - Name: "activeMemberName", - Username: "activeMemberUsername", - }, - ID: "activeMemberID", - } - - passiveMember := &models.User{ - UserData: models.UserData{ - Name: "passiveMemberName", - Username: "passiveMemberUsername", - }, - ID: "passiveMemberID", - } - - namespaceActiveHasNoPermission := &models.Namespace{ - Name: "group1", - Owner: "activeMemberID", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484716", - Members: []models.Member{ - {ID: "ownerID", Role: guard.RoleOwner}, - {ID: "activeMemberID", Role: guard.RoleOperator}, - {ID: "passiveMemberID", Role: guard.RoleObserver}, - }, - } - - mock.On("NamespaceGet", ctx, namespaceActiveHasNoPermission.TenantID).Return(namespaceActiveHasNoPermission, nil).Once() - - mock.On("UserGetByID", ctx, passiveMember.ID, false).Return(passiveMember, 0, nil).Once() - mock.On("UserGetByID", ctx, activeMember.ID, false).Return(activeMember, 0, nil).Once() - }, - Expected: guard.ErrForbidden, - }, - { - description: "fails when user store function fails", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484717", - UserID: "activeMemberID", - MemberID: "passiveMemberID", - MemberNewRole: guard.RoleOperator, - RequiredMocks: func() { - activeMember := &models.User{ - UserData: models.UserData{ - Name: "activeMemberName", - Username: "activeMemberUsername", - }, - ID: "activeMemberID", - } - - passiveMember := &models.User{ - UserData: models.UserData{ - Name: "passiveMemberName", - Username: "passiveMemberUsername", - }, - ID: "passiveMemberID", - } - - namespaceActivePassive := &models.Namespace{ - Name: "group1", - Owner: "activeMemberID", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484717", - Members: []models.Member{ - {ID: "ownerID", Role: guard.RoleOwner}, - {ID: "activeMemberID", Role: 
guard.RoleAdministrator}, - {ID: "passiveMemberID", Role: guard.RoleObserver}, - }, - } - - mock.On("NamespaceGet", ctx, namespaceActivePassive.TenantID).Return(namespaceActivePassive, nil).Once() - - mock.On("UserGetByID", ctx, passiveMember.ID, false).Return(passiveMember, 0, nil).Once() - mock.On("UserGetByID", ctx, activeMember.ID, false).Return(activeMember, 0, nil).Once() - - mock.On("NamespaceEditMember", ctx, namespaceActivePassive.TenantID, passiveMember.ID, guard.RoleOperator).Return(errors.New("error")).Once() - }, - Expected: errors.New("error"), - }, - { - description: "Success", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484717", - UserID: "activeMemberID", - MemberID: "passiveMemberID", - MemberNewRole: guard.RoleOperator, - RequiredMocks: func() { - namespaceActivePassive := &models.Namespace{ - Name: "group1", - Owner: "activeMemberID", - TenantID: "a736a52b-5777-4f92-b0b8-e359bf484717", - Members: []models.Member{ - {ID: "ownerID", Role: guard.RoleOwner}, - {ID: "activeMemberID", Role: guard.RoleAdministrator}, - {ID: "passiveMemberID", Role: guard.RoleObserver}, - }, - } - - activeMember := &models.User{ - UserData: models.UserData{ - Name: "activeMemberName", - Username: "activeMemberUsername", - }, - ID: "activeMemberID", - } - - passiveMember := &models.User{ - UserData: models.UserData{ - Name: "passiveMemberName", - Username: "passiveMemberUsername", - }, - ID: "passiveMemberID", - } - - mock.On("NamespaceGet", ctx, namespaceActivePassive.TenantID).Return(namespaceActivePassive, nil).Once() - - mock.On("UserGetByID", ctx, passiveMember.ID, false).Return(passiveMember, 0, nil).Once() - mock.On("UserGetByID", ctx, activeMember.ID, false).Return(activeMember, 0, nil).Once() - - mock.On("NamespaceEditMember", ctx, namespaceActivePassive.TenantID, passiveMember.ID, guard.RoleOperator).Return(nil).Once() - }, - Expected: nil, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - tc.RequiredMocks() - - service := 
NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) - err := service.EditNamespaceUser(ctx, tc.TenantID, tc.UserID, tc.MemberID, tc.MemberNewRole) - assert.Equal(t, tc.Expected, err) - }) - } - mock.AssertExpectations(t) -} - -func TestGetSessionRecord(t *testing.T) { - mock := new(mocks.Store) - - ctx := context.TODO() - - type Expected struct { - status bool - err error - } - - cases := []struct { - description string - requiredMocks func(namespace *models.Namespace) - namespace *models.Namespace - tenantID string - expected Expected - }{ - { - description: "fails when the namespace document is not found", - namespace: &models.Namespace{Name: "group1", Owner: "hash1", TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", Settings: &models.NamespaceSettings{SessionRecord: true}}, - requiredMocks: func(namespace *models.Namespace) { - mock.On("NamespaceGet", ctx, namespace.TenantID).Return(namespace, store.ErrNoDocuments).Once() - }, - expected: Expected{false, NewErrNamespaceNotFound("a736a52b-5777-4f92-b0b8-e359bf484713", store.ErrNoDocuments)}, - }, - { - description: "fails when store namespace get fails", - namespace: &models.Namespace{Name: "group1", Owner: "hash1", TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", Settings: &models.NamespaceSettings{SessionRecord: true}}, - requiredMocks: func(namespace *models.Namespace) { - mock.On("NamespaceGet", ctx, namespace.TenantID).Return(nil, errors.New("error")).Once() - }, - tenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - expected: Expected{false, NewErrNamespaceNotFound("a736a52b-5777-4f92-b0b8-e359bf484713", errors.New("error"))}, - }, - { - description: "fails when store namespace get session record fails", - namespace: &models.Namespace{Name: "group1", Owner: "hash1", TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", Settings: &models.NamespaceSettings{SessionRecord: true}}, - requiredMocks: func(namespace *models.Namespace) { - mock.On("NamespaceGet", ctx, 
namespace.TenantID).Return(namespace, nil).Once() - mock.On("NamespaceGetSessionRecord", ctx, namespace.TenantID).Return(false, errors.New("error")).Once() - }, - tenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - expected: Expected{false, errors.New("error")}, - }, - { - description: "succeeds", - namespace: &models.Namespace{Name: "group1", Owner: "hash1", TenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", Settings: &models.NamespaceSettings{SessionRecord: true}}, - requiredMocks: func(namespace *models.Namespace) { - mock.On("NamespaceGet", ctx, namespace.TenantID).Return(namespace, nil).Once() - mock.On("NamespaceGetSessionRecord", ctx, namespace.TenantID).Return(true, nil).Once() - }, - tenantID: "a736a52b-5777-4f92-b0b8-e359bf484713", - expected: Expected{true, nil}, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - tc.requiredMocks(tc.namespace) - - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) - returnedUserSecurity, err := service.GetSessionRecord(ctx, tc.namespace.TenantID) - assert.Equal(t, tc.expected, Expected{returnedUserSecurity, err}) - }) - } - - mock.AssertExpectations(t) -} - -func TestEditSessionRecord(t *testing.T) { - mock := new(mocks.Store) - - ctx := context.TODO() - - cases := []struct { - description string - namespace *models.Namespace - requiredMocks func() - sessionRecord bool - tenantID string - expected error - }{ - { - description: "fails when namespace set session record fails", - namespace: &models.Namespace{ - Name: "group1", - Owner: "hash1", - TenantID: "xxxx", - Settings: &models.NamespaceSettings{SessionRecord: true}, - Members: []models.Member{ - { - ID: "hash1", - Role: guard.RoleOwner, - }, - { - ID: "hash2", - Role: guard.RoleObserver, - }, - }, - }, - requiredMocks: func() { - namespace := &models.Namespace{ - Name: "group1", - Owner: "hash1", - TenantID: "xxxx", - Settings: &models.NamespaceSettings{SessionRecord: true}, - 
Members: []models.Member{ - { - ID: "hash1", - Role: guard.RoleOwner, - }, - { - ID: "hash2", - Role: guard.RoleObserver, - }, - }, - } - - status := true - mock.On("NamespaceSetSessionRecord", ctx, status, namespace.TenantID).Return(errors.New("error")).Once() - }, - tenantID: "xxxx", - sessionRecord: true, - expected: errors.New("error"), - }, - { - description: "succeeds", - namespace: &models.Namespace{Name: "group1", Owner: "hash1", TenantID: "xxxx", Settings: &models.NamespaceSettings{SessionRecord: true}, Members: []models.Member{ - { - ID: "hash1", - Role: guard.RoleOwner, - }, - { - ID: "hash2", - Role: guard.RoleObserver, - }, - }}, - requiredMocks: func() { - namespace := &models.Namespace{Name: "group1", Owner: "hash1", TenantID: "xxxx", Settings: &models.NamespaceSettings{SessionRecord: true}, Members: []models.Member{ - { - ID: "hash1", - Role: guard.RoleOwner, - }, - { - ID: "hash2", - Role: guard.RoleObserver, - }, - }} - - status := true - mock.On("NamespaceSetSessionRecord", ctx, status, namespace.TenantID).Return(nil).Once() - }, - tenantID: "xxxx", - sessionRecord: true, - expected: nil, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - tc.requiredMocks() - - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) - err := service.EditSessionRecordStatus(ctx, tc.sessionRecord, tc.tenantID) - assert.Equal(t, tc.expected, err) + t.Run(tc.name, func(t *testing.T) { + ctx := context.Background() + tc.mocks(ctx) + status, err := s.GetSessionRecord(ctx, tc.tenantID) + assert.Equal(t, tc.expected, Expected{status, err}) }) } - mock.AssertExpectations(t) + storeMock.AssertExpectations(t) } diff --git a/api/services/service.go b/api/services/service.go index 88f9d788c58..9eb30b6866a 100644 --- a/api/services/service.go +++ b/api/services/service.go @@ -4,6 +4,7 @@ import ( "crypto/rsa" "github.com/shellhub-io/shellhub/api/store" + 
"github.com/shellhub-io/shellhub/pkg/api/internalclient" "github.com/shellhub-io/shellhub/pkg/cache" "github.com/shellhub-io/shellhub/pkg/geoip" "github.com/shellhub-io/shellhub/pkg/validator" @@ -20,7 +21,7 @@ type service struct { privKey *rsa.PrivateKey pubKey *rsa.PublicKey cache cache.Cache - client interface{} + client internalclient.Client locator geoip.Locator validator *validator.Validator } @@ -30,19 +31,27 @@ type Service interface { BillingInterface TagsService DeviceService - DeviceTags UserService SSHKeysService - SSHKeysTagsService SessionService NamespaceService + MemberService AuthService StatsService SetupService SystemService + APIKeyService } -func NewService(store store.Store, privKey *rsa.PrivateKey, pubKey *rsa.PublicKey, cache cache.Cache, c interface{}, l geoip.Locator) *APIService { +type Option func(service *APIService) + +func WithLocator(locator geoip.Locator) Option { + return func(service *APIService) { + service.locator = locator + } +} + +func NewService(store store.Store, privKey *rsa.PrivateKey, pubKey *rsa.PublicKey, cache cache.Cache, c internalclient.Client, options ...Option) *APIService { if privKey == nil || pubKey == nil { var err error privKey, pubKey, err = LoadKeys() @@ -51,5 +60,21 @@ func NewService(store store.Store, privKey *rsa.PrivateKey, pubKey *rsa.PublicKe } } - return &APIService{service: &service{store, privKey, pubKey, cache, c, l, validator.New()}} + service := &APIService{ + service: &service{ + store, + privKey, + pubKey, + cache, + c, + geoip.NewNullGeoLite(), + validator.New(), + }, + } + + for _, option := range options { + option(service) + } + + return service } diff --git a/api/services/session.go b/api/services/session.go index 7c826f82341..6cf35ec2032 100644 --- a/api/services/session.go +++ b/api/services/session.go @@ -5,26 +5,37 @@ import ( "net" "github.com/shellhub-io/shellhub/api/store" - "github.com/shellhub-io/shellhub/pkg/api/paginator" + "github.com/shellhub-io/shellhub/pkg/api/query" 
"github.com/shellhub-io/shellhub/pkg/api/requests" + "github.com/shellhub-io/shellhub/pkg/clock" "github.com/shellhub-io/shellhub/pkg/models" + log "github.com/sirupsen/logrus" ) type SessionService interface { - ListSessions(ctx context.Context, pagination paginator.Query) ([]models.Session, int, error) + ListSessions(ctx context.Context, req *requests.ListSessions) ([]models.Session, int, error) GetSession(ctx context.Context, uid models.UID) (*models.Session, error) CreateSession(ctx context.Context, session requests.SessionCreate) (*models.Session, error) DeactivateSession(ctx context.Context, uid models.UID) error KeepAliveSession(ctx context.Context, uid models.UID) error - SetSessionAuthenticated(ctx context.Context, uid models.UID, authenticated bool) error + UpdateSession(ctx context.Context, uid models.UID, model models.SessionUpdate) error + EventSession(ctx context.Context, uid models.UID, event *models.SessionEvent) error } -func (s *service) ListSessions(ctx context.Context, pagination paginator.Query) ([]models.Session, int, error) { - return s.store.SessionList(ctx, pagination) +func (s *service) ListSessions(ctx context.Context, req *requests.ListSessions) ([]models.Session, int, error) { + opts := make([]store.QueryOption, 0) + if req.TenantID != "" { + opts = append(opts, s.store.Options().InNamespace(req.TenantID)) + } + + opts = append(opts, s.store.Options().Sort(&query.Sorter{By: "started_at", Order: query.OrderDesc})) + opts = append(opts, s.store.Options().Paginate(&req.Paginator)) + + return s.store.SessionList(ctx, opts...) 
} func (s *service) GetSession(ctx context.Context, uid models.UID) (*models.Session, error) { - session, err := s.store.SessionGet(ctx, uid) + session, err := s.store.SessionResolve(ctx, store.SessionUIDResolver, string(uid)) if err != nil { return nil, NewErrSessionNotFound(uid, err) } @@ -35,7 +46,7 @@ func (s *service) GetSession(ctx context.Context, uid models.UID) (*models.Sessi func (s *service) CreateSession(ctx context.Context, session requests.SessionCreate) (*models.Session, error) { position, _ := s.locator.GetPosition(net.ParseIP(session.IPAddress)) - return s.store.SessionCreate(ctx, models.Session{ + uid, err := s.store.SessionCreate(ctx, models.Session{ UID: session.UID, DeviceUID: models.UID(session.DeviceUID), Username: session.Username, @@ -47,21 +58,66 @@ func (s *service) CreateSession(ctx context.Context, session requests.SessionCre Latitude: position.Latitude, }, }) + if err != nil { + return nil, err + } + + return s.store.SessionResolve(ctx, store.SessionUIDResolver, uid) } func (s *service) DeactivateSession(ctx context.Context, uid models.UID) error { - err := s.store.SessionDeleteActives(ctx, uid) - if err == store.ErrNoDocuments { + sess, err := s.store.SessionResolve(ctx, store.SessionUIDResolver, string(uid)) + if err != nil { return NewErrSessionNotFound(uid, err) } - return err + return s.store.ActiveSessionDelete(ctx, models.UID(sess.UID)) } func (s *service) KeepAliveSession(ctx context.Context, uid models.UID) error { - return s.store.SessionSetLastSeen(ctx, uid) + session, err := s.store.SessionResolve(ctx, store.SessionUIDResolver, string(uid)) + if err != nil { + return NewErrSessionNotFound(uid, err) + } + + session.LastSeen = clock.Now() + + return s.store.SessionUpdate(ctx, session) } -func (s *service) SetSessionAuthenticated(ctx context.Context, uid models.UID, authenticated bool) error { - return s.store.SessionSetAuthenticated(ctx, uid, authenticated) +func (s *service) UpdateSession(ctx context.Context, uid models.UID, 
model models.SessionUpdate) error { + session, err := s.store.SessionResolve(ctx, store.SessionUIDResolver, string(uid)) + if err != nil { + return NewErrSessionNotFound(uid, err) + } + + if model.Authenticated != nil { + session.Authenticated = *model.Authenticated + } + + if model.Type != nil { + session.Type = *model.Type + } + + if model.Recorded != nil { + session.Recorded = *model.Recorded + } + + // We need to create an active session when authenticated to maintain compatibility with the old store implementation. + // In the future, we may refactor the store to remove the active_session pattern. + if session.Authenticated { + if err := s.store.ActiveSessionCreate(ctx, session); err != nil { + log.WithError(err).WithField("session_id", session.UID).Warn("failed to activate the session") + } + } + + return s.store.SessionUpdate(ctx, session) +} + +func (s *service) EventSession(ctx context.Context, uid models.UID, event *models.SessionEvent) error { + if _, err := s.store.SessionResolve(ctx, store.SessionUIDResolver, string(uid)); err != nil { + return NewErrSessionNotFound(uid, err) + } + + return s.store.SessionEventsCreate(ctx, event) } diff --git a/api/services/session_test.go b/api/services/session_test.go index 69e5dbc24e6..e59bb133d7b 100644 --- a/api/services/session_test.go +++ b/api/services/session_test.go @@ -8,18 +8,21 @@ import ( goerrors "errors" "github.com/shellhub-io/shellhub/api/store" - "github.com/shellhub-io/shellhub/api/store/mocks" - "github.com/shellhub-io/shellhub/pkg/api/paginator" + storemock "github.com/shellhub-io/shellhub/api/store/mocks" + "github.com/shellhub-io/shellhub/pkg/api/query" "github.com/shellhub-io/shellhub/pkg/api/requests" storecache "github.com/shellhub-io/shellhub/pkg/cache" "github.com/shellhub-io/shellhub/pkg/geoip" mocksGeoIp "github.com/shellhub-io/shellhub/pkg/geoip/mocks" "github.com/shellhub-io/shellhub/pkg/models" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" ) func 
TestListSessions(t *testing.T) { - mock := new(mocks.Store) + storeMock := new(storemock.Store) + queryOptionsMock := new(storemock.QueryOptions) + storeMock.On("Options").Return(queryOptionsMock) ctx := context.TODO() @@ -30,16 +33,31 @@ func TestListSessions(t *testing.T) { } cases := []struct { - name string - pagination paginator.Query - requiredMocks func(query paginator.Query) + description string + req *requests.ListSessions + requiredMocks func() expected Expected }{ { - name: "fails", - pagination: paginator.Query{Page: 1, PerPage: 10}, - requiredMocks: func(query paginator.Query) { - mock.On("SessionList", ctx, query). + description: "fails", + req: &requests.ListSessions{ + TenantID: "00000000-0000-4000-0000-000000000000", + Paginator: query.Paginator{Page: 1, PerPage: 10}, + }, + requiredMocks: func() { + queryOptionsMock. + On("InNamespace", "00000000-0000-4000-0000-000000000000"). + Return(nil). + Once() + queryOptionsMock. + On("Sort", &query.Sorter{By: "started_at", Order: query.OrderDesc}). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 1, PerPage: 10}). + Return(nil). + Once() + storeMock.On("SessionList", ctx, mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). Return(nil, 0, goerrors.New("error")).Once() }, expected: Expected{ @@ -49,15 +67,30 @@ func TestListSessions(t *testing.T) { }, }, { - name: "succeeds", - pagination: paginator.Query{Page: 1, PerPage: 10}, - requiredMocks: func(query paginator.Query) { + description: "succeeds", + req: &requests.ListSessions{ + TenantID: "00000000-0000-4000-0000-000000000000", + Paginator: query.Paginator{Page: 1, PerPage: 10}, + }, + requiredMocks: func() { sessions := []models.Session{ {UID: "uid1"}, {UID: "uid2"}, {UID: "uid3"}, } - mock.On("SessionList", ctx, query). + queryOptionsMock. + On("InNamespace", "00000000-0000-4000-0000-000000000000"). + Return(nil). + Once() + queryOptionsMock. 
+ On("Sort", &query.Sorter{By: "started_at", Order: query.OrderDesc}). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 1, PerPage: 10}). + Return(nil). + Once() + storeMock.On("SessionList", ctx, mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). Return(sessions, len(sessions), nil).Once() }, expected: Expected{ @@ -76,21 +109,22 @@ func TestListSessions(t *testing.T) { }, } + service := NewService(store.Store(storeMock), privateKey, publicKey, storecache.NewNullCache(), clientMock) + for _, tc := range cases { - t.Run(tc.name, func(t *testing.T) { - tc.requiredMocks(tc.pagination) + t.Run(tc.description, func(t *testing.T) { + tc.requiredMocks() - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) - returnedSessions, count, err := service.ListSessions(ctx, tc.pagination) + returnedSessions, count, err := service.ListSessions(ctx, tc.req) assert.Equal(t, tc.expected, Expected{returnedSessions, count, err}) }) } - mock.AssertExpectations(t) + storeMock.AssertExpectations(t) } func TestGetSession(t *testing.T) { - mock := new(mocks.Store) + mock := new(storemock.Store) ctx := context.TODO() @@ -110,7 +144,7 @@ func TestGetSession(t *testing.T) { name: "fails when session is not found", uid: models.UID("_uid"), requiredMocks: func() { - mock.On("SessionGet", ctx, models.UID("_uid")). + mock.On("SessionResolve", ctx, store.SessionUIDResolver, "_uid"). Return(nil, goerrors.New("error")).Once() }, expected: Expected{ @@ -124,7 +158,7 @@ func TestGetSession(t *testing.T) { requiredMocks: func() { session := &models.Session{UID: "uid"} - mock.On("SessionGet", ctx, models.UID("uid")). + mock.On("SessionResolve", ctx, store.SessionUIDResolver, "uid"). 
Return(session, nil).Once() }, expected: Expected{ @@ -138,7 +172,7 @@ func TestGetSession(t *testing.T) { t.Run(tc.name, func(t *testing.T) { tc.requiredMocks() - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) + service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock) returnedSession, err := service.GetSession(ctx, tc.uid) assert.Equal(t, tc.expected, Expected{returnedSession, err}) }) @@ -148,7 +182,7 @@ func TestGetSession(t *testing.T) { } func TestCreateSession(t *testing.T) { - mock := new(mocks.Store) + mock := new(storemock.Store) ctx := context.TODO() @@ -180,7 +214,7 @@ func TestCreateSession(t *testing.T) { locator.On("GetPosition", net.ParseIP(model.IPAddress)). Return(geoip.Position{}, nil).Once() mock.On("SessionCreate", ctx, model). - Return(nil, Err).Once() + Return("", Err).Once() }, expected: Expected{ session: nil, @@ -194,6 +228,8 @@ func TestCreateSession(t *testing.T) { locator.On("GetPosition", net.ParseIP(model.IPAddress)). Return(geoip.Position{}, nil).Once() mock.On("SessionCreate", ctx, model). + Return("uid", nil).Once() + mock.On("SessionResolve", ctx, store.SessionUIDResolver, "uid"). 
Return(&model, nil).Once() }, expected: Expected{ @@ -207,7 +243,7 @@ func TestCreateSession(t *testing.T) { t.Run(tc.name, func(t *testing.T) { tc.requiredMocks() - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, locator) + service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, WithLocator(locator)) returnedSession, err := service.CreateSession(ctx, tc.session) assert.Equal(t, tc.expected, Expected{returnedSession, err}) }) @@ -217,7 +253,7 @@ func TestCreateSession(t *testing.T) { } func TestDeactivateSession(t *testing.T) { - mock := new(mocks.Store) + mock := new(storemock.Store) ctx := context.TODO() @@ -231,25 +267,35 @@ func TestDeactivateSession(t *testing.T) { name: "fails when session is not found", uid: models.UID("_uid"), requiredMocks: func() { - mock.On("SessionDeleteActives", ctx, models.UID("_uid")). - Return(store.ErrNoDocuments).Once() + mock.On("SessionResolve", ctx, store.SessionUIDResolver, "_uid"). + Return(nil, goerrors.New("get error")).Once() }, - expected: NewErrSessionNotFound("_uid", store.ErrNoDocuments), + expected: NewErrSessionNotFound("_uid", goerrors.New("get error")), }, { name: "fails", uid: models.UID("_uid"), requiredMocks: func() { - mock.On("SessionDeleteActives", ctx, models.UID("_uid")). + mock.On("SessionResolve", ctx, store.SessionUIDResolver, "_uid"). + Return(&models.Session{ + UID: "_uid", + }, nil).Once() + + mock.On("ActiveSessionDelete", ctx, models.UID("_uid")). Return(goerrors.New("error")).Once() }, expected: goerrors.New("error"), }, { name: "succeeds", - uid: models.UID("uid"), + uid: models.UID("_uid"), requiredMocks: func() { - mock.On("SessionDeleteActives", ctx, models.UID("uid")). + mock.On("SessionResolve", ctx, store.SessionUIDResolver, "_uid"). + Return(&models.Session{ + UID: "_uid", + }, nil).Once() + + mock.On("ActiveSessionDelete", ctx, models.UID("_uid")). 
Return(nil).Once() }, expected: nil, @@ -260,7 +306,7 @@ func TestDeactivateSession(t *testing.T) { t.Run(tc.name, func(t *testing.T) { tc.requiredMocks() - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) + service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock) err := service.DeactivateSession(ctx, tc.uid) assert.Equal(t, tc.expected, err) }) @@ -269,46 +315,63 @@ func TestDeactivateSession(t *testing.T) { mock.AssertExpectations(t) } -func TestSetSessionAuthenticated(t *testing.T) { - mock := new(mocks.Store) +func TestUpdateSession(t *testing.T) { + mockStore := new(storemock.Store) + ctx := context.Background() + uid := models.UID("test-uid") + updateModel := models.SessionUpdate{} + theTrue := true + updateModel.Authenticated = &theTrue - ctx := context.TODO() + sess := &models.Session{UID: string(uid)} cases := []struct { - name string - uid models.UID + description string requiredMocks func() - expected error + expectedErr error }{ { - name: "fails", - uid: models.UID("_uid"), + description: "fails when SessionGet returns error", requiredMocks: func() { - mock.On("SessionSetAuthenticated", ctx, models.UID("_uid"), true). - Return(goerrors.New("error")).Once() + mockStore.On("SessionResolve", ctx, store.SessionUIDResolver, string(uid)). + Return(nil, goerrors.New("get error")).Once() }, - expected: goerrors.New("error"), + expectedErr: NewErrSessionNotFound(uid, goerrors.New("get error")), }, { - name: "succeeds", - uid: models.UID("uid"), + description: "fails when SessionUpdate returns error", requiredMocks: func() { - mock.On("SessionSetAuthenticated", ctx, models.UID("uid"), true). + mockStore.On("SessionResolve", ctx, store.SessionUIDResolver, string(uid)). + Return(sess, nil).Once() + mockStore.On("ActiveSessionCreate", ctx, sess). Return(nil).Once() + mockStore.On("SessionUpdate", ctx, sess). 
+ Return(goerrors.New("update error")).Once() }, - expected: nil, + expectedErr: goerrors.New("update error"), + }, + { + description: "succeeds when no errors", + requiredMocks: func() { + mockStore.On("SessionResolve", ctx, store.SessionUIDResolver, string(uid)). + Return(sess, nil).Once() + mockStore.On("ActiveSessionCreate", ctx, sess). + Return(nil).Once() + mockStore.On("SessionUpdate", ctx, sess). + Return(nil).Once() + }, + expectedErr: nil, }, } + service := NewService(store.Store(mockStore), privateKey, publicKey, storecache.NewNullCache(), clientMock) for _, tc := range cases { - t.Run(tc.name, func(t *testing.T) { + t.Run(tc.description, func(t *testing.T) { tc.requiredMocks() - - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) - err := service.SetSessionAuthenticated(ctx, tc.uid, true) - assert.Equal(t, tc.expected, err) + err := service.UpdateSession(ctx, uid, updateModel) + assert.Equal(t, tc.expectedErr, err) }) } - mock.AssertExpectations(t) + mockStore.AssertExpectations(t) } diff --git a/api/services/setup.go b/api/services/setup.go index 6b68a97c59d..2e5b0553905 100644 --- a/api/services/setup.go +++ b/api/services/setup.go @@ -2,63 +2,142 @@ package services import ( "context" + "crypto" + "crypto/rand" + "crypto/rsa" + "crypto/sha256" + "crypto/x509" + "encoding/hex" + "encoding/pem" + "os" - "github.com/shellhub-io/shellhub/api/pkg/guard" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" "github.com/shellhub-io/shellhub/pkg/api/requests" "github.com/shellhub-io/shellhub/pkg/clock" "github.com/shellhub-io/shellhub/pkg/models" + "github.com/shellhub-io/shellhub/pkg/uuid" ) +const PrivateKeyPath = "/var/run/secrets/api_private_key" + type SetupService interface { Setup(ctx context.Context, req requests.Setup) error + SetupVerify(ctx context.Context, sign string) error } func (s *service) Setup(ctx context.Context, req requests.Setup) error { + system, err := s.store.SystemGet(ctx) 
+ if err != nil || system.Setup { + return NewErrSetupForbidden(err) + } + data := models.UserData{ - Name: req.Name, - Email: req.Email, - Username: req.Username, + Name: req.Name, + Email: req.Email, + Username: req.Username, + RecoveryEmail: "", } if ok, err := s.validator.Struct(data); !ok || err != nil { return NewErrUserInvalid(nil, err) } - password := models.NewUserPassword(req.Password) + password, err := models.HashUserPassword(req.Password) + if err != nil { + return NewErrUserPasswordInvalid(err) + } if ok, err := s.validator.Struct(password); !ok || err != nil { return NewErrUserPasswordInvalid(err) } user := &models.User{ - UserData: data, - UserPassword: password, + Origin: models.UserOriginLocal, + UserData: data, + Password: password, // NOTE: user's created from the setup screen doesn't need to be confirmed. - Confirmed: true, - CreatedAt: clock.Now(), + Status: models.UserStatusConfirmed, + CreatedAt: clock.Now(), + MaxNamespaces: -1, + Preferences: models.UserPreferences{ + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + }, + // NOTE: The first user is always an admin. 
+ Admin: true, } - err := s.store.UserCreate(ctx, user) + insertedID, err := s.store.UserCreate(ctx, user) if err != nil { return NewErrUserDuplicated([]string{req.Username}, err) } namespace := &models.Namespace{ - Name: req.Namespace, - Owner: user.ID, - MaxDevices: 0, + Name: req.Username, + TenantID: uuid.Generate(), + MaxDevices: -1, + Owner: insertedID, + Type: models.TypePersonal, Members: []models.Member{ { - ID: user.ID, - Role: guard.RoleOwner, + ID: insertedID, + Role: authorizer.RoleOwner, + AddedAt: clock.Now(), }, }, CreatedAt: clock.Now(), + Settings: &models.NamespaceSettings{ + SessionRecord: false, + ConnectionAnnouncement: models.DefaultAnnouncementMessage, + }, } if _, err = s.store.NamespaceCreate(ctx, namespace); err != nil { + user.ID = insertedID + if err := s.store.UserDelete(ctx, user); err != nil { + return NewErrUserDelete(err) + } + return NewErrNamespaceDuplicated(err) } + system.Setup = true + if err := s.store.SystemSet(ctx, system); err != nil { + return err + } + + return nil +} + +func (s *service) SetupVerify(_ context.Context, sign string) error { + privKeyData, err := os.ReadFile(PrivateKeyPath) + if err != nil { + return err + } + + privKeyPem, _ := pem.Decode(privKeyData) + privKey, err := x509.ParsePKCS8PrivateKey(privKeyPem.Bytes) + if err != nil { + return err + } + + const msgString = "shellhub" + + msgHash := sha256.New() + _, err = msgHash.Write([]byte(msgString)) + if err != nil { + return err + } + + signed, err := rsa.SignPKCS1v15(rand.Reader, privKey.(*rsa.PrivateKey), crypto.SHA256, msgHash.Sum(nil)) + if err != nil { + return err + } + + sumSigned := sha256.Sum256(signed) + + if sign != hex.EncodeToString(sumSigned[:]) { + return NewErrSetupForbidden(nil) + } + return nil } diff --git a/api/services/setup_test.go b/api/services/setup_test.go index 023e31aa38b..7eea9a43660 100644 --- a/api/services/setup_test.go +++ b/api/services/setup_test.go @@ -3,20 +3,36 @@ package services import ( "context" "testing" + 
"time" - "github.com/shellhub-io/shellhub/api/pkg/guard" "github.com/shellhub-io/shellhub/api/store" "github.com/shellhub-io/shellhub/api/store/mocks" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" "github.com/shellhub-io/shellhub/pkg/api/requests" storecache "github.com/shellhub-io/shellhub/pkg/cache" + "github.com/shellhub-io/shellhub/pkg/clock" + clockmock "github.com/shellhub-io/shellhub/pkg/clock/mocks" "github.com/shellhub-io/shellhub/pkg/errors" "github.com/shellhub-io/shellhub/pkg/models" - uuid_mocks "github.com/shellhub-io/shellhub/pkg/uuid/mocks" + "github.com/shellhub-io/shellhub/pkg/uuid" + uuidmock "github.com/shellhub-io/shellhub/pkg/uuid/mocks" "github.com/stretchr/testify/assert" ) func TestSetup(t *testing.T) { - mock := new(mocks.Store) + storeMock := new(mocks.Store) + + clockMock := new(clockmock.Clock) + clock.DefaultBackend = clockMock + + tenant := "00000000-0000-4000-0000-000000000000" + + uuidMock := new(uuidmock.Uuid) + uuid.DefaultBackend = uuidMock + uuidMock.On("Generate").Return(tenant) + + now := time.Now() + clockMock.On("Now").Return(now) ctx := context.TODO() @@ -26,108 +42,323 @@ func TestSetup(t *testing.T) { requiredMocks func() expected error }{ + { + description: "Fail when setup isn't allowed", + req: requests.Setup{ + Email: "teste@google.com", + Name: "userteste", + Username: "userteste", + Password: "secret", + }, + requiredMocks: func() { + storeMock.On("SystemGet", ctx).Return(nil, errors.New("error", "", 0)).Once() + }, + expected: NewErrSetupForbidden(errors.New("error", "", 0)), + }, + { + description: "Fail when cannot hash the password", + req: requests.Setup{ + Email: "teste@google.com", + Name: "userteste", + Username: "userteste", + Password: "secret", + }, + requiredMocks: func() { + storeMock.On("SystemGet", ctx).Return(&models.System{ + Setup: false, + }, nil).Once() + + hashMock. + On("Do", "secret"). + Return("", errors.New("error", "", 0)). 
+ Once() + }, + expected: NewErrUserPasswordInvalid(errors.New("error", "", 0)), + }, { description: "Fail when cannot create the user", req: requests.Setup{ - Email: "teste@google.com", - Name: "userteste", - Username: "userteste", - Password: "123456", - Namespace: "teste-space", + Email: "teste@google.com", + Name: "userteste", + Username: "userteste", + Password: "secret", }, requiredMocks: func() { - clockMock.On("Now").Return(now).Once() + storeMock.On("SystemGet", ctx).Return(&models.System{ + Setup: false, + }, nil).Once() + + hashMock. + On("Do", "secret"). + Return("$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YVVCIa2UYuFV4OJby7Yi", nil). + Once() + user := &models.User{ + Origin: models.UserOriginLocal, + Status: models.UserStatusConfirmed, + CreatedAt: now, UserData: models.UserData{ Name: "userteste", Email: "teste@google.com", Username: "userteste", }, - UserPassword: models.NewUserPassword("123456"), - Confirmed: true, - CreatedAt: now, + Password: models.UserPassword{ + Plain: "secret", + Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YVVCIa2UYuFV4OJby7Yi", + }, + MaxNamespaces: -1, + Preferences: models.UserPreferences{ + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + }, + Admin: true, } - mock.On("UserCreate", ctx, user).Return(errors.New("error", "", 0)).Once() + storeMock.On("UserCreate", ctx, user).Return("", errors.New("error", "", 0)).Once() }, expected: NewErrUserDuplicated([]string{"userteste"}, errors.New("error", "", 0)), }, { - description: "Fail when cannot create namespace", + description: "Fail when cannot create namespace, and user deletion fails", req: requests.Setup{ - Email: "teste@google.com", - Name: "userteste", - Username: "userteste", - Password: "123456", - Namespace: "teste-space", + Email: "teste@google.com", + Name: "userteste", + Username: "userteste", + Password: "secret", }, requiredMocks: func() { + user := &models.User{ + Origin: models.UserOriginLocal, + Status: models.UserStatusConfirmed, + 
CreatedAt: now, + UserData: models.UserData{ + Name: "userteste", + Email: "teste@google.com", + Username: "userteste", + }, + Password: models.UserPassword{ + Plain: "secret", + Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YVVCIa2UYuFV4OJby7Yi", + }, + MaxNamespaces: -1, + Preferences: models.UserPreferences{ + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + }, + Admin: true, + } + userWithID := &models.User{ + ID: "000000000000000000000000", + Origin: models.UserOriginLocal, + Status: models.UserStatusConfirmed, + CreatedAt: now, + UserData: models.UserData{ + Name: "userteste", + Email: "teste@google.com", + Username: "userteste", + }, + Password: models.UserPassword{ + Plain: "secret", + Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YVVCIa2UYuFV4OJby7Yi", + }, + MaxNamespaces: -1, + Preferences: models.UserPreferences{ + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + }, + Admin: true, + } + + storeMock.On("SystemGet", ctx).Return(&models.System{ + Setup: false, + }, nil).Once() + clockMock.On("Now").Return(now).Twice() - uuidMock := &uuid_mocks.Uuid{} + + uuidMock := &uuidmock.Uuid{} uuidMock.On("Generate").Return("random_uuid").Once() + + hashMock. + On("Do", "secret"). + Return("$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YVVCIa2UYuFV4OJby7Yi", nil). 
+ Once()
+
+ storeMock.On("UserCreate", ctx, user).Return("000000000000000000000000", nil).Once()
+
+ namespace := &models.Namespace{
+ Name: "userteste",
+ TenantID: tenant,
+ Owner: "000000000000000000000000",
+ MaxDevices: -1,
+ Type: models.TypePersonal,
+ Members: []models.Member{
+ {
+ ID: "000000000000000000000000",
+ Role: authorizer.RoleOwner,
+ AddedAt: now,
+ },
+ },
+ Settings: &models.NamespaceSettings{
+ SessionRecord: false,
+ ConnectionAnnouncement: models.DefaultAnnouncementMessage,
+ },
+ CreatedAt: now,
+ }
+
+ storeMock.On("NamespaceCreate", ctx, namespace).Return("", errors.New("error", "", 0)).Once()
+ storeMock.On("UserDelete", ctx, userWithID).Return(errors.New("error", "", 0)).Once()
+ },
+ expected: NewErrUserDelete(errors.New("error", "", 0)),
+ },
+ {
+ description: "Fail when cannot create namespace, and user deletion also fails",
+ req: requests.Setup{
+ Email: "teste@google.com",
+ Name: "userteste",
+ Username: "userteste",
+ Password: "secret",
+ },
+ requiredMocks: func() {
 user := &models.User{
+ Origin: models.UserOriginLocal,
+ Status: models.UserStatusConfirmed,
+ CreatedAt: now,
 UserData: models.UserData{
 Name: "userteste",
 Email: "teste@google.com",
 Username: "userteste",
 },
- UserPassword: models.NewUserPassword("123456"),
- Confirmed: true,
- CreatedAt: now,
+ Password: models.UserPassword{
+ Plain: "secret",
+ Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YVVCIa2UYuFV4OJby7Yi",
+ },
+ MaxNamespaces: -1,
+ Preferences: models.UserPreferences{
+ AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal},
+ },
+ Admin: true,
+ }
+ userWithID := &models.User{
+ ID: "000000000000000000000000",
+ Origin: models.UserOriginLocal,
+ Status: models.UserStatusConfirmed,
+ CreatedAt: now,
+ UserData: models.UserData{
+ Name: "userteste",
+ Email: "teste@google.com",
+ Username: "userteste",
+ },
+ Password: models.UserPassword{
+ Plain: "secret",
+ Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YVVCIa2UYuFV4OJby7Yi",
+ },
+ 
MaxNamespaces: -1, + Preferences: models.UserPreferences{ + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + }, + Admin: true, + } + + storeMock.On("SystemGet", ctx).Return(&models.System{ + Setup: false, + }, nil).Once() + + clockMock.On("Now").Return(now).Twice() + + uuidMock := &uuidmock.Uuid{} + uuidMock.On("Generate").Return("random_uuid").Once() + + hashMock. + On("Do", "secret"). + Return("$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YVVCIa2UYuFV4OJby7Yi", nil). + Once() + + storeMock.On("UserCreate", ctx, user).Return("000000000000000000000000", nil).Once() + namespace := &models.Namespace{ - Name: "teste-space", - Owner: user.ID, - MaxDevices: 0, + Name: "userteste", + TenantID: tenant, + Owner: "000000000000000000000000", + MaxDevices: -1, + Type: models.TypePersonal, Members: []models.Member{ { - ID: user.ID, - Role: guard.RoleOwner, + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + AddedAt: now, }, }, + Settings: &models.NamespaceSettings{ + SessionRecord: false, + ConnectionAnnouncement: models.DefaultAnnouncementMessage, + }, CreatedAt: now, } - mock.On("UserCreate", ctx, user).Return(nil).Once() - mock.On("NamespaceCreate", ctx, namespace).Return(namespace, errors.New("error", "", 0)).Once() + + storeMock.On("NamespaceCreate", ctx, namespace).Return("", errors.New("error", "", 0)).Once() + storeMock.On("UserDelete", ctx, userWithID).Return(errors.New("error", "", 0)).Once() }, - expected: NewErrNamespaceDuplicated(errors.New("error", "", 0)), + expected: NewErrUserDelete(errors.New("error", "", 0)), }, { description: "Success to create the user and namespace", req: requests.Setup{ - Email: "teste@google.com", - Name: "userteste", - Username: "userteste", - Password: "123456", - Namespace: "teste-space", + Email: "teste@google.com", + Name: "userteste", + Username: "userteste", + Password: "secret", }, requiredMocks: func() { + initialSystem := &models.System{Setup: false} + finalSystem := &models.System{Setup: true} + + 
storeMock.On("SystemGet", ctx).Return(initialSystem, nil).Once() + + hashMock.On("Do", "secret"). + Return("$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YVVCIa2UYuFV4OJby7Yi", nil). + Once() + clockMock.On("Now").Return(now).Twice() - uuidMock := &uuid_mocks.Uuid{} - uuidMock.On("Generate").Return("random_uuid").Once() + user := &models.User{ + Origin: models.UserOriginLocal, + Status: models.UserStatusConfirmed, + CreatedAt: now, UserData: models.UserData{ Name: "userteste", Email: "teste@google.com", Username: "userteste", }, - UserPassword: models.NewUserPassword("123456"), - Confirmed: true, - CreatedAt: now, + Password: models.UserPassword{ + Plain: "secret", + Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YVVCIa2UYuFV4OJby7Yi", + }, + MaxNamespaces: -1, + Preferences: models.UserPreferences{ + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + }, + Admin: true, } + + storeMock.On("UserCreate", ctx, user).Return("000000000000000000000000", nil).Once() + namespace := &models.Namespace{ - Name: "teste-space", - Owner: user.ID, - MaxDevices: 0, + Name: "userteste", + TenantID: tenant, + Owner: "000000000000000000000000", + MaxDevices: -1, + Type: models.TypePersonal, Members: []models.Member{ { - ID: user.ID, - Role: guard.RoleOwner, + ID: "000000000000000000000000", + Role: authorizer.RoleOwner, + AddedAt: now, }, }, + Settings: &models.NamespaceSettings{ + SessionRecord: false, + ConnectionAnnouncement: models.DefaultAnnouncementMessage, + }, CreatedAt: now, } - mock.On("UserCreate", ctx, user).Return(nil).Once() - mock.On("NamespaceCreate", ctx, namespace).Return(namespace, nil).Once() + storeMock.On("NamespaceCreate", ctx, namespace).Return(tenant, nil).Once() + storeMock.On("SystemSet", ctx, finalSystem).Return(nil).Once() }, expected: nil, }, @@ -137,7 +368,7 @@ func TestSetup(t *testing.T) { t.Run(tc.description, func(t *testing.T) { tc.requiredMocks() - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), 
clientMock, nil) + service := NewService(store.Store(storeMock), privateKey, publicKey, storecache.NewNullCache(), clientMock) err := service.Setup(ctx, tc.req) assert.Equal(t, tc.expected, err) diff --git a/api/services/sshkeys.go b/api/services/sshkeys.go index 1c02f3c7245..64cd60ef93a 100644 --- a/api/services/sshkeys.go +++ b/api/services/sshkeys.go @@ -7,9 +7,9 @@ import ( "crypto/x509" "encoding/pem" "regexp" + "slices" "github.com/shellhub-io/shellhub/api/store" - "github.com/shellhub-io/shellhub/pkg/api/paginator" "github.com/shellhub-io/shellhub/pkg/api/requests" "github.com/shellhub-io/shellhub/pkg/api/responses" "github.com/shellhub-io/shellhub/pkg/clock" @@ -20,7 +20,7 @@ import ( type SSHKeysService interface { EvaluateKeyFilter(ctx context.Context, key *models.PublicKey, dev models.Device) (bool, error) EvaluateKeyUsername(ctx context.Context, key *models.PublicKey, username string) (bool, error) - ListPublicKeys(ctx context.Context, pagination paginator.Query) ([]models.PublicKey, int, error) + ListPublicKeys(ctx context.Context, req *requests.ListPublicKeys) ([]models.PublicKey, int, error) GetPublicKey(ctx context.Context, fingerprint, tenant string) (*models.PublicKey, error) CreatePublicKey(ctx context.Context, req requests.PublicKeyCreate, tenant string) (*responses.PublicKeyCreate, error) UpdatePublicKey(ctx context.Context, fingerprint, tenant string, key requests.PublicKeyUpdate) (*models.PublicKey, error) @@ -32,25 +32,34 @@ type Request struct { Namespace string } -func (s *service) EvaluateKeyFilter(_ context.Context, key *models.PublicKey, dev models.Device) (bool, error) { - if key.Filter.Hostname != "" { +func (s *service) EvaluateKeyFilter(ctx context.Context, key *models.PublicKey, dev models.Device) (bool, error) { + switch { + case key.Filter.Hostname != "": ok, err := regexp.MatchString(key.Filter.Hostname, dev.Name) if err != nil { return false, err } return ok, nil - } else if len(key.Filter.Tags) > 0 { - for _, tag := range 
dev.Tags { - if contains(key.Filter.Tags, tag) { + case len(key.Filter.TagIDs) > 0: + // NOTE: We need to resolve the device from the store because the "dev" parameter + // is constructed from the JSON request body, which doesn't include tag_ids since + // the agent doesn't send this information. + d, err := s.store.DeviceResolve(ctx, store.DeviceUIDResolver, dev.UID) + if err != nil { + return false, NewErrDeviceNotFound(models.UID(dev.UID), err) + } + + for _, tagID := range d.TagIDs { + if slices.Contains(key.Filter.TagIDs, tagID) { return true, nil } } return false, nil + default: + return true, nil } - - return true, nil } func (s *service) EvaluateKeyUsername(_ context.Context, key *models.PublicKey, username string) (bool, error) { @@ -67,26 +76,36 @@ func (s *service) EvaluateKeyUsername(_ context.Context, key *models.PublicKey, } func (s *service) GetPublicKey(ctx context.Context, fingerprint, tenant string) (*models.PublicKey, error) { - _, err := s.store.NamespaceGet(ctx, tenant) - if err != nil { + if _, err := s.store.NamespaceResolve(ctx, store.NamespaceTenantIDResolver, tenant); err != nil { return nil, NewErrNamespaceNotFound(tenant, err) } - return s.store.PublicKeyGet(ctx, fingerprint, tenant) + return s.store.PublicKeyResolve(ctx, store.PublicKeyFingerprintResolver, fingerprint, s.store.Options().InNamespace(tenant)) } func (s *service) CreatePublicKey(ctx context.Context, req requests.PublicKeyCreate, tenant string) (*responses.PublicKeyCreate, error) { // Checks if public key filter type is Tags. // If it is, checks if there are, at least, one tag on the public key filter and if the all tags exist on database. 
+ tagIDs := []string{} if req.Filter.Tags != nil { - tags, _, err := s.store.TagsGet(ctx, tenant) + tags, _, err := s.store.TagList(ctx, s.store.Options().InNamespace(tenant)) if err != nil { return nil, NewErrTagEmpty(tenant, err) } - for _, tag := range req.Filter.Tags { - if !contains(tags, tag) { - return nil, NewErrTagNotFound(tag, nil) + for _, tagName := range req.Filter.Tags { + found := false + for _, tag := range tags { + if tagName == tag.Name { + tagIDs = append(tagIDs, tag.ID) + found = true + + break + } + } + + if !found { + return nil, NewErrTagNotFound(tagName, nil) } } } @@ -98,7 +117,7 @@ func (s *service) CreatePublicKey(ctx context.Context, req requests.PublicKeyCre req.Fingerprint = ssh.FingerprintLegacyMD5(pubKey) - returnedKey, err := s.store.PublicKeyGet(ctx, req.Fingerprint, tenant) + returnedKey, err := s.store.PublicKeyResolve(ctx, store.PublicKeyFingerprintResolver, req.Fingerprint, s.store.Options().InNamespace(tenant)) if err != nil && err != store.ErrNoDocuments { return nil, NewErrPublicKeyNotFound(req.Fingerprint, err) } @@ -117,19 +136,18 @@ func (s *service) CreatePublicKey(ctx context.Context, req requests.PublicKeyCre Username: req.Username, Filter: models.PublicKeyFilter{ Hostname: req.Filter.Hostname, - Tags: req.Filter.Tags, + Taggable: models.Taggable{TagIDs: tagIDs, Tags: nil}, }, }, } - err = s.store.PublicKeyCreate(ctx, &model) - if err != nil { + if _, err := s.store.PublicKeyCreate(ctx, &model); err != nil { return nil, err } return &responses.PublicKeyCreate{ Data: model.Data, - Filter: responses.PublicKeyFilter(model.Filter), + Filter: responses.PublicKeyFilter{Hostname: model.Filter.Hostname, Tags: req.Filter.Tags}, Name: model.Name, Username: model.Username, TenantID: model.TenantID, @@ -137,52 +155,71 @@ func (s *service) CreatePublicKey(ctx context.Context, req requests.PublicKeyCre }, nil } -func (s *service) ListPublicKeys(ctx context.Context, pagination paginator.Query) ([]models.PublicKey, int, error) { - 
return s.store.PublicKeyList(ctx, pagination) +func (s *service) ListPublicKeys(ctx context.Context, req *requests.ListPublicKeys) ([]models.PublicKey, int, error) { + return s.store.PublicKeyList( + ctx, + s.store.Options().InNamespace(req.TenantID), + s.store.Options().Paginate(&req.Paginator), + ) } func (s *service) UpdatePublicKey(ctx context.Context, fingerprint, tenant string, key requests.PublicKeyUpdate) (*models.PublicKey, error) { + publicKey, err := s.store.PublicKeyResolve(ctx, store.PublicKeyFingerprintResolver, fingerprint, s.store.Options().InNamespace(tenant)) + if err != nil { + return nil, NewErrPublicKeyNotFound(fingerprint, err) + } + // Checks if public key filter type is Tags. If it is, checks if there are, at least, one tag on the public key // filter and if the all tags exist on database. + tagIDs := []string{} if key.Filter.Tags != nil { - tags, _, err := s.store.TagsGet(ctx, tenant) + tags, _, err := s.store.TagList(ctx, s.store.Options().InNamespace(tenant)) if err != nil { return nil, NewErrTagEmpty(tenant, err) } - for _, tag := range key.Filter.Tags { - if !contains(tags, tag) { - return nil, NewErrTagNotFound(tag, nil) + for _, tagName := range key.Filter.Tags { + found := false + for _, tag := range tags { + if tagName == tag.Name { + tagIDs = append(tagIDs, tag.ID) + found = true + + break + } + } + + if !found { + return nil, NewErrTagNotFound(tagName, nil) } } } - model := models.PublicKeyUpdate{ - PublicKeyFields: models.PublicKeyFields{ - Name: key.Name, - Username: key.Username, - Filter: models.PublicKeyFilter{ - Hostname: key.Filter.Hostname, - Tags: key.Filter.Tags, - }, - }, + // Update the public key fields + publicKey.Name = key.Name + publicKey.Username = key.Username + publicKey.Filter.Hostname = key.Filter.Hostname + publicKey.Filter.TagIDs = tagIDs + publicKey.Filter.Tags = nil + + if err := s.store.PublicKeyUpdate(ctx, publicKey); err != nil { + return nil, err } - return s.store.PublicKeyUpdate(ctx, fingerprint, 
tenant, &model) + return s.store.PublicKeyResolve(ctx, store.PublicKeyFingerprintResolver, fingerprint, s.store.Options().InNamespace(tenant)) } func (s *service) DeletePublicKey(ctx context.Context, fingerprint, tenant string) error { - _, err := s.store.NamespaceGet(ctx, tenant) - if err != nil { + if _, err := s.store.NamespaceResolve(ctx, store.NamespaceTenantIDResolver, tenant); err != nil { return NewErrNamespaceNotFound(tenant, err) } - _, err = s.store.PublicKeyGet(ctx, fingerprint, tenant) + publicKey, err := s.store.PublicKeyResolve(ctx, store.PublicKeyFingerprintResolver, fingerprint, s.store.Options().InNamespace(tenant)) if err != nil { return NewErrPublicKeyNotFound(fingerprint, err) } - return s.store.PublicKeyDelete(ctx, fingerprint, tenant) + return s.store.PublicKeyDelete(ctx, publicKey) } func (s *service) CreatePrivateKey(ctx context.Context) (*models.PrivateKey, error) { diff --git a/api/services/sshkeys_tags.go b/api/services/sshkeys_tags.go deleted file mode 100644 index 470cd00f06f..00000000000 --- a/api/services/sshkeys_tags.go +++ /dev/null @@ -1,147 +0,0 @@ -package services - -import ( - "context" - - "github.com/shellhub-io/shellhub/api/store" -) - -type SSHKeysTagsService interface { - AddPublicKeyTag(ctx context.Context, tenant, fingerprint, tag string) error - RemovePublicKeyTag(ctx context.Context, tenant, fingerprint, tag string) error - UpdatePublicKeyTags(ctx context.Context, tenant, fingerprint string, tags []string) error -} - -// AddPublicKeyTag trys to add a tag to the models.PublicKey, when its filter is from Tags type. -// -// It checks if the models.Namespace and models.PublicKey exists and try to perform the addition action. -func (s *service) AddPublicKeyTag(ctx context.Context, tenant, fingerprint, tag string) error { - // Checks if the namespace exists. 
- namespace, err := s.store.NamespaceGet(ctx, tenant) - if err != nil || namespace == nil { - return NewErrNamespaceNotFound(tenant, err) - } - - // Checks if the public key exists. - key, err := s.store.PublicKeyGet(ctx, fingerprint, tenant) - if err != nil || key == nil { - return NewErrPublicKeyNotFound(fingerprint, err) - } - - if key.Filter.Hostname != "" { - return NewErrPublicKeyFilter(nil) - } - - if len(key.Filter.Tags) == DeviceMaxTags { - return NewErrTagLimit(DeviceMaxTags, nil) - } - - tags, _, err := s.store.TagsGet(ctx, tenant) - if err != nil { - return NewErrTagEmpty(tenant, err) - } - - if !contains(tags, tag) { - return NewErrTagNotFound(tag, nil) - } - - // Trys to add a public key. - err = s.store.PublicKeyPushTag(ctx, tenant, fingerprint, tag) - if err != nil { - switch err { - case store.ErrNoDocuments: - return ErrDuplicateTagName - default: - return err - } - } - - return nil -} - -// RemovePublicKeyTag trys to remove a tag from the models.PublicKey, when its filter is from Tags type. -func (s *service) RemovePublicKeyTag(ctx context.Context, tenant, fingerprint, tag string) error { - // Checks if the namespace exists. - namespace, err := s.store.NamespaceGet(ctx, tenant) - if err != nil || namespace == nil { - return NewErrNamespaceNotFound(tenant, nil) - } - - // Checks if the public key exists. - key, err := s.store.PublicKeyGet(ctx, fingerprint, tenant) - if err != nil || key == nil { - return NewErrPublicKeyNotFound(fingerprint, err) - } - - if key.Filter.Hostname != "" { - return NewErrPublicKeyFilter(nil) - } - - // Checks if the tag already exists in the device. - if !contains(key.Filter.Tags, tag) { - return NewErrTagNotFound(tag, nil) - } - - // Trys to remove a public key. - err = s.store.PublicKeyPullTag(ctx, tenant, fingerprint, tag) - if err != nil { - return err - } - - return nil -} - -// UpdatePublicKeyTags trys to update the tags of the models.PublicKey, when its filter is from Tags type. 
-// -// It checks if the models.Namespace and models.PublicKey exists and try to perform the update action. -func (s *service) UpdatePublicKeyTags(ctx context.Context, tenant, fingerprint string, tags []string) error { - if len(tags) > DeviceMaxTags { - return NewErrTagLimit(DeviceMaxTags, nil) - } - - set := func(list []string) []string { - state := make(map[string]bool) - helper := make([]string, 0) - for _, item := range list { - if _, ok := state[item]; !ok { - state[item] = true - helper = append(helper, item) - } - } - - return helper - } - - tags = set(tags) - - namespace, err := s.store.NamespaceGet(ctx, tenant) - if err != nil || namespace == nil { - return NewErrNamespaceNotFound(tenant, nil) - } - - key, err := s.store.PublicKeyGet(ctx, fingerprint, tenant) - if err != nil || key == nil { - return NewErrPublicKeyNotFound(fingerprint, err) - } - - if key.Filter.Hostname != "" { - return NewErrPublicKeyNotFound(fingerprint, nil) - } - - allTags, _, err := s.store.TagsGet(ctx, tenant) - if err != nil { - return NewErrTagEmpty(tenant, err) - } - - for _, tag := range tags { - if !contains(allTags, tag) { - return NewErrTagNotFound(tag, nil) - } - } - - if _, _, err := s.store.PublicKeySetTags(ctx, tenant, fingerprint, tags); err != nil { - return err - } - - return nil -} diff --git a/api/services/sshkeys_tags_test.go b/api/services/sshkeys_tags_test.go deleted file mode 100644 index b98a8048bef..00000000000 --- a/api/services/sshkeys_tags_test.go +++ /dev/null @@ -1,445 +0,0 @@ -package services - -import ( - "context" - "testing" - - "github.com/shellhub-io/shellhub/api/store" - "github.com/shellhub-io/shellhub/api/store/mocks" - storecache "github.com/shellhub-io/shellhub/pkg/cache" - "github.com/shellhub-io/shellhub/pkg/errors" - "github.com/shellhub-io/shellhub/pkg/models" - "github.com/stretchr/testify/assert" -) - -func TestAddPublicKeyTag(t *testing.T) { - mock := new(mocks.Store) - - ctx := context.TODO() - - cases := []struct { - description string 
- tenant string - fingerprint string - tag string - requiredMocks func() - expected error - }{ - { - description: "fail when namespace was not found", - tenant: "tenant", - fingerprint: "fingerprint", - tag: "tag", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "tenant").Return(nil, errors.New("error", "", 0)).Once() - }, - expected: NewErrNamespaceNotFound("tenant", errors.New("error", "", 0)), - }, - { - description: "fail when public key was not found", - tenant: "tenant", - fingerprint: "fingerprint", - tag: "tag", - requiredMocks: func() { - namespace := &models.Namespace{TenantID: "tenant"} - - mock.On("NamespaceGet", ctx, "tenant").Return(namespace, nil).Once() - mock.On("PublicKeyGet", ctx, "fingerprint", "tenant").Return(nil, errors.New("error", "", 0)).Once() - }, - expected: NewErrPublicKeyNotFound("fingerprint", errors.New("error", "", 0)), - }, - { - description: "fail when the tag limit on public key has reached", - tenant: "tenant", - fingerprint: "fingerprint", - tag: "tag", - requiredMocks: func() { - namespace := &models.Namespace{ - TenantID: "tenant", - } - - key := &models.PublicKey{ - TenantID: "tenant", - Fingerprint: "fingerprint", - PublicKeyFields: models.PublicKeyFields{ - Filter: models.PublicKeyFilter{ - Tags: []string{"tag1", "tag2", "tag3"}, - }, - }, - } - - mock.On("NamespaceGet", ctx, "tenant").Return(namespace, nil).Once() - mock.On("PublicKeyGet", ctx, "fingerprint", "tenant").Return(key, nil).Once() - }, - expected: NewErrTagLimit(DeviceMaxTags, nil), - }, - { - description: "fail when the tag does not exist in a device", - tenant: "tenant", - fingerprint: "fingerprint", - tag: "tag", - requiredMocks: func() { - namespace := &models.Namespace{ - TenantID: "tenant", - } - tags := []string{"tag1", "tag2"} - key := &models.PublicKey{ - TenantID: "tenant", - Fingerprint: "fingerprint", - PublicKeyFields: models.PublicKeyFields{ - Filter: models.PublicKeyFilter{ - Tags: tags, - }, - }, - } - - mock.On("NamespaceGet", ctx, 
"tenant").Return(namespace, nil).Once() - mock.On("PublicKeyGet", ctx, "fingerprint", "tenant").Return(key, nil).Once() - mock.On("TagsGet", ctx, "tenant").Return(tags, len(tags), nil).Once() - }, - expected: NewErrTagNotFound("tag", nil), - }, - { - description: "fail when cannot add tag to public key", - tenant: "tenant", - fingerprint: "fingerprint", - tag: "tag", - requiredMocks: func() { - namespace := &models.Namespace{ - TenantID: "tenant", - } - tags := []string{"tag", "tag3", "tag6"} - key := &models.PublicKey{ - TenantID: "tenant", - Fingerprint: "fingerprint", - PublicKeyFields: models.PublicKeyFields{ - Filter: models.PublicKeyFilter{ - Tags: []string{"tag1", "tag2"}, - }, - }, - } - mock.On("NamespaceGet", ctx, "tenant").Return(namespace, nil).Once() - mock.On("PublicKeyGet", ctx, "fingerprint", "tenant").Return(key, nil).Once() - mock.On("TagsGet", ctx, "tenant").Return(tags, len(tags), nil).Once() - mock.On("PublicKeyPushTag", ctx, "tenant", "fingerprint", "tag").Return(errors.New("error", "", 0)).Once() - }, - expected: errors.New("error", "", 0), - }, - { - description: "success to add a to public key", - tenant: "tenant", - fingerprint: "fingerprint", - tag: "tag", - requiredMocks: func() { - namespace := &models.Namespace{ - TenantID: "tenant", - } - tags := []string{"tag", "tag3", "tag6"} - key := &models.PublicKey{ - TenantID: "tenant", - Fingerprint: "fingerprint", - PublicKeyFields: models.PublicKeyFields{ - Filter: models.PublicKeyFilter{ - Tags: []string{"tag1", "tag2"}, - }, - }, - } - mock.On("NamespaceGet", ctx, "tenant").Return(namespace, nil).Once() - mock.On("PublicKeyGet", ctx, "fingerprint", "tenant").Return(key, nil).Once() - mock.On("TagsGet", ctx, "tenant").Return(tags, len(tags), nil).Once() - mock.On("PublicKeyPushTag", ctx, "tenant", "fingerprint", "tag").Return(nil).Once() - }, - expected: nil, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - tc.requiredMocks() - - services := 
NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) - err := services.AddPublicKeyTag(ctx, tc.tenant, tc.fingerprint, tc.tag) - assert.Equal(t, tc.expected, err) - }) - } - - mock.AssertExpectations(t) -} - -func TestRemovePublicKeyTag(t *testing.T) { - mock := &mocks.Store{} - - ctx := context.TODO() - - cases := []struct { - description string - tenant string - fingerprint string - tag string - requiredMocks func() - expected error - }{ - { - description: "fail when namespace was not found", - tenant: "tenant", - fingerprint: "fingerprint", - tag: "tag", - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "tenant").Return(nil, errors.New("error", "", 0)).Once() - }, - expected: NewErrNamespaceNotFound("tenant", nil), - }, - { - description: "fail when public key was not found", - tenant: "tenant", - fingerprint: "fingerprint", - tag: "tag", - requiredMocks: func() { - namespace := &models.Namespace{TenantID: "tenant"} - - mock.On("NamespaceGet", ctx, "tenant").Return(namespace, nil).Once() - mock.On("PublicKeyGet", ctx, "fingerprint", "tenant").Return(nil, errors.New("error", "", 0)).Once() - }, - expected: NewErrPublicKeyNotFound("fingerprint", errors.New("error", "", 0)), - }, - { - description: "fail when the tag does not exist in public key", - tenant: "tenant", - fingerprint: "fingerprint", - tag: "tag", - requiredMocks: func() { - namespace := &models.Namespace{ - TenantID: "tenant", - } - tags := []string{"tag1", "tag2"} - key := &models.PublicKey{ - TenantID: "tenant", - Fingerprint: "fingerprint", - PublicKeyFields: models.PublicKeyFields{ - Filter: models.PublicKeyFilter{ - Tags: tags, - }, - }, - } - - mock.On("NamespaceGet", ctx, "tenant").Return(namespace, nil).Once() - mock.On("PublicKeyGet", ctx, "fingerprint", "tenant").Return(key, nil).Once() - }, - expected: NewErrTagNotFound("tag", nil), - }, - { - description: "fail when remove the tag from public key", - tenant: "tenant", - fingerprint: 
"fingerprint", - tag: "tag", - requiredMocks: func() { - namespace := &models.Namespace{ - TenantID: "tenant", - } - tags := []string{"tag", "tag1", "tag2"} - key := &models.PublicKey{ - TenantID: "tenant", - Fingerprint: "fingerprint", - PublicKeyFields: models.PublicKeyFields{ - Filter: models.PublicKeyFilter{ - Tags: tags, - }, - }, - } - mock.On("NamespaceGet", ctx, "tenant").Return(namespace, nil).Once() - mock.On("PublicKeyGet", ctx, "fingerprint", "tenant").Return(key, nil).Once() - mock.On("PublicKeyPullTag", ctx, "tenant", "fingerprint", "tag").Return(errors.New("error", "", 0)).Once() - }, - expected: errors.New("error", "", 0), - }, - { - description: "success when remove a from public key", - tenant: "tenant", - fingerprint: "fingerprint", - tag: "tag", - requiredMocks: func() { - namespace := &models.Namespace{ - TenantID: "tenant", - } - tags := []string{"tag", "tag1", "tag2"} - key := &models.PublicKey{ - TenantID: "tenant", - Fingerprint: "fingerprint", - PublicKeyFields: models.PublicKeyFields{ - Filter: models.PublicKeyFilter{ - Tags: tags, - }, - }, - } - mock.On("NamespaceGet", ctx, "tenant").Return(namespace, nil).Once() - mock.On("PublicKeyGet", ctx, "fingerprint", "tenant").Return(key, nil).Once() - mock.On("PublicKeyPullTag", ctx, "tenant", "fingerprint", "tag").Return(nil).Once() - }, - expected: nil, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - tc.requiredMocks() - services := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) - err := services.RemovePublicKeyTag(ctx, tc.tenant, tc.fingerprint, tc.tag) - assert.Equal(t, tc.expected, err) - }) - } - - mock.AssertExpectations(t) -} - -func TestUpdatePublicKeyTags(t *testing.T) { - mock := &mocks.Store{} - - ctx := context.TODO() - - cases := []struct { - description string - tenant string - fingerprint string - tags []string - requiredMocks func() - expected error - }{ - { - description: "fail when 
namespace was not found", - tenant: "tenant", - fingerprint: "fingerprint", - tags: []string{"tag1", "tag2", "tag3"}, - requiredMocks: func() { - mock.On("NamespaceGet", ctx, "tenant").Return(nil, errors.New("error", "", 0)).Once() - }, - expected: NewErrNamespaceNotFound("tenant", nil), - }, - { - description: "fail when public key was not found", - tenant: "tenant", - fingerprint: "fingerprint", - tags: []string{"tag1", "tag2", "tag3"}, - requiredMocks: func() { - namespace := &models.Namespace{TenantID: "tenant"} - - mock.On("NamespaceGet", ctx, "tenant").Return(namespace, nil).Once() - mock.On("PublicKeyGet", ctx, "fingerprint", "tenant").Return(nil, errors.New("error", "", 0)).Once() - }, - expected: NewErrPublicKeyNotFound("fingerprint", errors.New("error", "", 0)), - }, - { - description: "fail when tags are great the tag limit", - tenant: "tenant", - fingerprint: "fingerprint", - tags: []string{"tag4", "tag5", "tag7", "tag5"}, - requiredMocks: func() { - namespace := &models.Namespace{ - TenantID: "tenant", - } - key := &models.PublicKey{ - TenantID: "tenant", - Fingerprint: "fingerprint", - PublicKeyFields: models.PublicKeyFields{ - Filter: models.PublicKeyFilter{ - Tags: []string{"tag1", "tag2"}, - }, - }, - } - - mock.On("NamespaceGet", ctx, "tenant").Return(namespace, nil).Twice() - mock.On("PublicKeyGet", ctx, "fingerprint", "tenant").Return(key, nil).Once() - }, - expected: NewErrTagLimit(DeviceMaxTags, nil), - }, - { - description: "fail when a tag does not exist in a device", - tenant: "tenant", - fingerprint: "fingerprint", - tags: []string{"tag2", "tag4", "tag5"}, - requiredMocks: func() { - namespace := &models.Namespace{ - TenantID: "tenant", - } - tags := []string{"tag4", "tag5", "tag7", "tag5"} - key := &models.PublicKey{ - TenantID: "tenant", - Fingerprint: "fingerprint", - PublicKeyFields: models.PublicKeyFields{ - Filter: models.PublicKeyFilter{ - Tags: []string{"tag1", "tag2"}, - }, - }, - } - - mock.On("NamespaceGet", ctx, 
"tenant").Return(namespace, nil).Twice() - mock.On("PublicKeyGet", ctx, "fingerprint", "tenant").Return(key, nil).Once() - mock.On("TagsGet", ctx, "tenant").Return(tags, len(tags), nil).Once() - }, - expected: NewErrTagNotFound("tag2", nil), - }, - { - description: "fail when update tags in public key fails", - tenant: "tenant", - fingerprint: "fingerprint", - tags: []string{"tag1", "tag2", "tag3"}, - requiredMocks: func() { - namespace := &models.Namespace{ - TenantID: "tenant", - } - tags := []string{"tag1", "tag2", "tag3", "tag4"} - key := &models.PublicKey{ - TenantID: "tenant", - Fingerprint: "fingerprint", - PublicKeyFields: models.PublicKeyFields{ - Filter: models.PublicKeyFilter{ - Tags: []string{"tag1", "tag2"}, - }, - }, - } - - mock.On("NamespaceGet", ctx, "tenant").Return(namespace, nil).Twice() - mock.On("PublicKeyGet", ctx, "fingerprint", "tenant").Return(key, nil).Once() - mock.On("TagsGet", ctx, "tenant").Return(tags, len(tags), nil).Once() - mock.On("PublicKeySetTags", ctx, "tenant", "fingerprint", []string{"tag1", "tag2", "tag3"}).Return(int64(0), int64(0), errors.New("error", "", 0)).Once() - }, - expected: errors.New("error", "", 0), - }, - { - description: "success update tags in public key", - tenant: "tenant", - fingerprint: "fingerprint", - tags: []string{"tag1", "tag2", "tag3"}, - requiredMocks: func() { - namespace := &models.Namespace{ - TenantID: "tenant", - } - tags := []string{"tag1", "tag2", "tag3", "tag4"} - key := &models.PublicKey{ - TenantID: "tenant", - Fingerprint: "fingerprint", - PublicKeyFields: models.PublicKeyFields{ - Filter: models.PublicKeyFilter{ - Tags: []string{"tag1", "tag2"}, - }, - }, - } - - mock.On("NamespaceGet", ctx, "tenant").Return(namespace, nil).Twice() - mock.On("PublicKeyGet", ctx, "fingerprint", "tenant").Return(key, nil).Once() - mock.On("TagsGet", ctx, "tenant").Return(tags, len(tags), nil).Once() - mock.On("PublicKeySetTags", ctx, "tenant", "fingerprint", []string{"tag1", "tag2", 
"tag3"}).Return(int64(1), int64(1), nil).Once() - }, - expected: nil, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - tc.requiredMocks() - - services := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) - err := services.UpdatePublicKeyTags(ctx, tc.tenant, tc.fingerprint, tc.tags) - assert.Equal(t, tc.expected, err) - }) - } -} diff --git a/api/services/sshkeys_test.go b/api/services/sshkeys_test.go index 1c74c5eddc2..fa13da78c59 100644 --- a/api/services/sshkeys_test.go +++ b/api/services/sshkeys_test.go @@ -5,8 +5,8 @@ import ( "testing" "github.com/shellhub-io/shellhub/api/store" - "github.com/shellhub-io/shellhub/api/store/mocks" - "github.com/shellhub-io/shellhub/pkg/api/paginator" + storemock "github.com/shellhub-io/shellhub/api/store/mocks" + "github.com/shellhub-io/shellhub/pkg/api/query" "github.com/shellhub-io/shellhub/pkg/api/requests" "github.com/shellhub-io/shellhub/pkg/api/responses" storecache "github.com/shellhub-io/shellhub/pkg/cache" @@ -14,6 +14,7 @@ import ( "github.com/shellhub-io/shellhub/pkg/errors" "github.com/shellhub-io/shellhub/pkg/models" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" "golang.org/x/crypto/ssh" ) @@ -26,7 +27,7 @@ const ( ) func TestEvaluateKeyFilter(t *testing.T) { - mock := &mocks.Store{} + storeMock := &storemock.Store{} ctx := context.TODO() @@ -75,18 +76,38 @@ func TestEvaluateKeyFilter(t *testing.T) { expected: Expected{true, nil}, }, { - description: "fail to evaluate filter tags when tag does not exist in device", + description: "fail to evaluate filter tags when DeviceResolve fails", key: &models.PublicKey{ PublicKeyFields: models.PublicKeyFields{ Filter: models.PublicKeyFilter{ - Tags: []string{"tag1", "tag2"}, + Taggable: models.Taggable{TagIDs: []string{"tag1_id", "tag2_id"}}, }, }, }, - device: models.Device{ - Tags: []string{"tag4"}, + device: models.Device{UID: "uid"}, + requiredMocks: func() { + 
storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "uid"). + Return(nil, errors.New("error", "", 0)). + Once() + }, + expected: Expected{false, NewErrDeviceNotFound("uid", errors.New("error", "", 0))}, + }, + { + description: "fail to evaluate filter tags when tag does not exist in device", + key: &models.PublicKey{ + PublicKeyFields: models.PublicKeyFields{ + Filter: models.PublicKeyFilter{ + Taggable: models.Taggable{TagIDs: []string{"tag1_id", "tag2_id"}}, + }, + }, }, + device: models.Device{UID: "uid"}, requiredMocks: func() { + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "uid"). + Return(&models.Device{UID: "uid", Taggable: models.Taggable{TagIDs: []string{"nonexistent_id"}}}, nil). + Once() }, expected: Expected{false, nil}, }, @@ -95,14 +116,16 @@ func TestEvaluateKeyFilter(t *testing.T) { key: &models.PublicKey{ PublicKeyFields: models.PublicKeyFields{ Filter: models.PublicKeyFilter{ - Tags: []string{"tag1", "tag2"}, + Taggable: models.Taggable{TagIDs: []string{"tag1_id", "tag2_id"}}, }, }, }, - device: models.Device{ - Tags: []string{"tag1"}, - }, + device: models.Device{UID: "uid"}, requiredMocks: func() { + storeMock. + On("DeviceResolve", ctx, store.DeviceUIDResolver, "uid"). + Return(&models.Device{UID: "uid", Taggable: models.Taggable{TagIDs: []string{"tag1_id"}}}, nil). 
+ Once() }, expected: Expected{true, nil}, }, @@ -124,21 +147,23 @@ func TestEvaluateKeyFilter(t *testing.T) { t.Run(tc.description, func(t *testing.T) { tc.requiredMocks() - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) + service := NewService(store.Store(storeMock), privateKey, publicKey, storecache.NewNullCache(), clientMock) ok, err := service.EvaluateKeyFilter(ctx, tc.key, tc.device) assert.Equal(t, tc.expected, Expected{ok, err}) }) } - mock.AssertExpectations(t) + storeMock.AssertExpectations(t) } func TestListPublicKeys(t *testing.T) { - mock := &mocks.Store{} + storeMock := &storemock.Store{} + queryOptionsMock := new(storemock.QueryOptions) + storeMock.On("Options").Return(queryOptionsMock) clockMock.On("Now").Return(now).Twice() - s := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) + s := NewService(store.Store(storeMock), privateKey, publicKey, storecache.NewNullCache(), clientMock) ctx := context.TODO() @@ -155,28 +180,53 @@ func TestListPublicKeys(t *testing.T) { cases := []struct { description string - ctx context.Context keys []models.PublicKey - query paginator.Query + req *requests.ListPublicKeys requiredMocks func() expected Expected }{ { description: "Fails when the query is invalid", - ctx: ctx, - query: paginator.Query{Page: -1, PerPage: 10}, + req: &requests.ListPublicKeys{ + TenantID: "00000000-0000-4000-0000-000000000000", + Paginator: query.Paginator{Page: 1, PerPage: 10}, + }, requiredMocks: func() { - mock.On("PublicKeyList", ctx, paginator.Query{Page: -1, PerPage: 10}).Return(nil, 0, errors.New("error", "", 0)).Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-4000-0000-000000000000"). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 1, PerPage: 10}). + Return(nil). + Once() + storeMock. 
+ On("PublicKeyList", ctx, mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return(nil, 0, errors.New("error", "", 0)). + Once() }, expected: Expected{nil, 0, errors.New("error", "", 0)}, }, { description: "Successful list the keys", - ctx: ctx, keys: keys, - query: paginator.Query{Page: 1, PerPage: 10}, + req: &requests.ListPublicKeys{ + TenantID: "00000000-0000-4000-0000-000000000000", + Paginator: query.Paginator{Page: 1, PerPage: 10}, + }, requiredMocks: func() { - mock.On("PublicKeyList", ctx, paginator.Query{Page: 1, PerPage: 10}).Return(keys, len(keys), nil).Once() + queryOptionsMock. + On("InNamespace", "00000000-0000-4000-0000-000000000000"). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 1, PerPage: 10}). + Return(nil). + Once() + storeMock. + On("PublicKeyList", ctx, mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return(keys, len(keys), nil). + Once() }, expected: Expected{keys, len(keys), nil}, }, @@ -185,19 +235,22 @@ func TestListPublicKeys(t *testing.T) { for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { tc.requiredMocks() - returnedKeys, count, err := s.ListPublicKeys(ctx, tc.query) + returnedKeys, count, err := s.ListPublicKeys(ctx, tc.req) assert.Equal(t, tc.expected, Expected{returnedKeys, count, err}) }) } - mock.AssertExpectations(t) + storeMock.AssertExpectations(t) } + func TestGetPublicKeys(t *testing.T) { - mock := &mocks.Store{} + storeMock := &storemock.Store{} + queryOptionsMock := new(storemock.QueryOptions) + storeMock.On("Options").Return(queryOptionsMock) clockMock.On("Now").Return(now).Twice() - s := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) + s := NewService(store.Store(storeMock), privateKey, publicKey, storecache.NewNullCache(), clientMock) ctx := context.TODO() @@ -220,7 +273,7 @@ func TestGetPublicKeys(t *testing.T) { fingerprint: 
"fingerprint", tenantID: InvalidTenantID, requiredMocks: func() { - mock.On("NamespaceGet", ctx, InvalidTenantID).Return(nil, errors.New("error", "", 0)).Once() + storeMock.On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, InvalidTenantID).Return(nil, errors.New("error", "", 0)).Once() }, expected: Expected{nil, NewErrNamespaceNotFound(InvalidTenantID, errors.New("error", "", 0))}, }, @@ -232,8 +285,12 @@ func TestGetPublicKeys(t *testing.T) { requiredMocks: func() { namespace := models.Namespace{TenantID: "tenant1"} - mock.On("NamespaceGet", ctx, namespace.TenantID).Return(&namespace, nil).Once() - mock.On("PublicKeyGet", ctx, InvalidFingerprint, "tenant1").Return(nil, errors.New("error", "", 0)).Once() + storeMock.On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, namespace.TenantID).Return(&namespace, nil).Once() + queryOptionsMock. + On("InNamespace", "tenant1"). + Return(nil). + Once() + storeMock.On("PublicKeyResolve", ctx, store.PublicKeyFingerprintResolver, InvalidFingerprint, mock.AnythingOfType("store.QueryOption")).Return(nil, errors.New("error", "", 0)).Once() }, expected: Expected{nil, errors.New("error", "", 0)}, }, @@ -247,8 +304,12 @@ func TestGetPublicKeys(t *testing.T) { key := models.PublicKey{ Data: []byte("teste"), Fingerprint: "fingerprint", CreatedAt: clock.Now(), TenantID: "tenant1", PublicKeyFields: models.PublicKeyFields{Name: "teste"}, } - mock.On("NamespaceGet", ctx, namespace.TenantID).Return(&namespace, nil).Once() - mock.On("PublicKeyGet", ctx, "fingerprint", "tenant1").Return(&key, nil).Once() + storeMock.On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, namespace.TenantID).Return(&namespace, nil).Once() + queryOptionsMock. + On("InNamespace", "tenant1"). + Return(nil). 
+ Once() + storeMock.On("PublicKeyResolve", ctx, store.PublicKeyFingerprintResolver, "fingerprint", mock.AnythingOfType("store.QueryOption")).Return(&key, nil).Once() }, expected: Expected{&models.PublicKey{ Data: []byte("teste"), Fingerprint: "fingerprint", CreatedAt: clock.Now(), TenantID: "tenant1", PublicKeyFields: models.PublicKeyFields{Name: "teste"}, @@ -264,15 +325,17 @@ func TestGetPublicKeys(t *testing.T) { }) } - mock.AssertExpectations(t) + storeMock.AssertExpectations(t) } func TestUpdatePublicKeys(t *testing.T) { - mock := new(mocks.Store) + storeMock := new(storemock.Store) + queryOptionsMock := new(storemock.QueryOptions) + storeMock.On("Options").Return(queryOptionsMock) ctx := context.TODO() - s := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) + s := NewService(store.Store(storeMock), privateKey, publicKey, storecache.NewNullCache(), clientMock) type Expected struct { key *models.PublicKey @@ -288,21 +351,48 @@ func TestUpdatePublicKeys(t *testing.T) { expected Expected }{ { - description: "fail update the key when filter tags is empty", + description: "fail when public key not found", + fingerprint: "fingerprint", + tenantID: "tenant", + keyUpdate: requests.PublicKeyUpdate{ + Filter: requests.PublicKeyFilter{ + Tags: []string{"tag1", "tag2"}, + }, + }, + requiredMocks: func() { + queryOptionsMock. + On("InNamespace", "tenant"). + Return(nil). 
+ Once() + storeMock.On("PublicKeyResolve", ctx, store.PublicKeyFingerprintResolver, "fingerprint", mock.AnythingOfType("store.QueryOption")).Return(nil, store.ErrNoDocuments).Once() + }, + expected: Expected{nil, NewErrPublicKeyNotFound("fingerprint", store.ErrNoDocuments)}, + }, + { + description: "fail update the key when tag list retrieval fails", fingerprint: "fingerprint", tenantID: "tenant", keyUpdate: requests.PublicKeyUpdate{ Filter: requests.PublicKeyFilter{ - Tags: []string{}, + Tags: []string{"tag1", "tag2"}, }, }, requiredMocks: func() { - mock.On("TagsGet", ctx, "tenant").Return([]string{}, 0, errors.New("error", "", 0)).Once() + existingKey := &models.PublicKey{ + Fingerprint: "fingerprint", + TenantID: "tenant", + } + queryOptionsMock. + On("InNamespace", "tenant"). + Return(nil). + Twice() + storeMock.On("PublicKeyResolve", ctx, store.PublicKeyFingerprintResolver, "fingerprint", mock.AnythingOfType("store.QueryOption")).Return(existingKey, nil).Once() + storeMock.On("TagList", ctx, mock.AnythingOfType("store.QueryOption")).Return(nil, 0, errors.New("error", "", 0)).Once() }, expected: Expected{nil, NewErrTagEmpty("tenant", errors.New("error", "", 0))}, }, { - description: "fail to update the key when a tag does not exist in a device", + description: "fail to update the key when a tag does not exist", fingerprint: "fingerprint", tenantID: "tenant", keyUpdate: requests.PublicKeyUpdate{ @@ -311,12 +401,25 @@ func TestUpdatePublicKeys(t *testing.T) { }, }, requiredMocks: func() { - mock.On("TagsGet", ctx, "tenant").Return([]string{"tag1", "tag4"}, 2, nil).Once() + existingKey := &models.PublicKey{ + Fingerprint: "fingerprint", + TenantID: "tenant", + } + tags := []models.Tag{ + {ID: "tag1_id", Name: "tag1", TenantID: "tenant"}, + {ID: "tag4_id", Name: "tag4", TenantID: "tenant"}, + } + queryOptionsMock. + On("InNamespace", "tenant"). + Return(nil). 
+ Twice() + storeMock.On("PublicKeyResolve", ctx, store.PublicKeyFingerprintResolver, "fingerprint", mock.AnythingOfType("store.QueryOption")).Return(existingKey, nil).Once() + storeMock.On("TagList", ctx, mock.AnythingOfType("store.QueryOption")).Return(tags, len(tags), nil).Once() }, expected: Expected{nil, NewErrTagNotFound("tag2", nil)}, }, { - description: "Fail update the key when filter is tags", + description: "fail update the key when filter is tags", fingerprint: "fingerprint", tenantID: "tenant", keyUpdate: requests.PublicKeyUpdate{ @@ -325,21 +428,34 @@ func TestUpdatePublicKeys(t *testing.T) { }, }, requiredMocks: func() { - model := models.PublicKeyUpdate{ + existingKey := &models.PublicKey{ + Fingerprint: "fingerprint", + TenantID: "tenant", PublicKeyFields: models.PublicKeyFields{ - Filter: models.PublicKeyFilter{ - Tags: []string{"tag1", "tag2"}, - }, + Filter: models.PublicKeyFilter{}, }, } + tags := []models.Tag{ + {ID: "tag1_id", Name: "tag1", TenantID: "tenant"}, + {ID: "tag2_id", Name: "tag2", TenantID: "tenant"}, + } - mock.On("TagsGet", ctx, "tenant").Return([]string{"tag1", "tag2"}, 2, nil).Once() - mock.On("PublicKeyUpdate", ctx, "fingerprint", "tenant", &model).Return(nil, errors.New("error", "", 0)).Once() + expectedKey := *existingKey + expectedKey.Filter.TagIDs = []string{"tag1_id", "tag2_id"} + expectedKey.Filter.Tags = nil + + queryOptionsMock. + On("InNamespace", "tenant"). + Return(nil). 
+ Twice() + storeMock.On("PublicKeyResolve", ctx, store.PublicKeyFingerprintResolver, "fingerprint", mock.AnythingOfType("store.QueryOption")).Return(existingKey, nil).Once() + storeMock.On("TagList", ctx, mock.AnythingOfType("store.QueryOption")).Return(tags, len(tags), nil).Once() + storeMock.On("PublicKeyUpdate", ctx, &expectedKey).Return(errors.New("error", "", 0)).Once() }, expected: Expected{nil, errors.New("error", "", 0)}, }, { - description: "Successful update the key when filter is tags", + description: "successful update the key when filter is tags", fingerprint: "fingerprint", tenantID: "tenant", keyUpdate: requests.PublicKeyUpdate{ @@ -348,35 +464,53 @@ func TestUpdatePublicKeys(t *testing.T) { }, }, requiredMocks: func() { - model := models.PublicKeyUpdate{ + existingKey := &models.PublicKey{ + Fingerprint: "fingerprint", + TenantID: "tenant", PublicKeyFields: models.PublicKeyFields{ - Filter: models.PublicKeyFilter{ - Tags: []string{"tag1", "tag2"}, - }, + Filter: models.PublicKeyFilter{}, }, } + tags := []models.Tag{ + {ID: "tag1_id", Name: "tag1", TenantID: "tenant"}, + {ID: "tag2_id", Name: "tag2", TenantID: "tenant"}, + } + + expectedKey := *existingKey + expectedKey.Filter.TagIDs = []string{"tag1_id", "tag2_id"} + expectedKey.Filter.Tags = nil - keyUpdateWithTagsModel := &models.PublicKey{ + updatedKey := &models.PublicKey{ + Fingerprint: "fingerprint", + TenantID: "tenant", PublicKeyFields: models.PublicKeyFields{ Filter: models.PublicKeyFilter{ - Tags: []string{"tag1", "tag2"}, + Taggable: models.Taggable{TagIDs: []string{"tag1_id", "tag2_id"}}, }, }, } - mock.On("TagsGet", ctx, "tenant").Return([]string{"tag1", "tag2"}, 2, nil).Once() - mock.On("PublicKeyUpdate", ctx, "fingerprint", "tenant", &model).Return(keyUpdateWithTagsModel, nil).Once() + queryOptionsMock. + On("InNamespace", "tenant"). + Return(nil). 
+ Times(3) + storeMock.On("PublicKeyResolve", ctx, store.PublicKeyFingerprintResolver, "fingerprint", mock.AnythingOfType("store.QueryOption")).Return(existingKey, nil).Once() + storeMock.On("TagList", ctx, mock.AnythingOfType("store.QueryOption")).Return(tags, len(tags), nil).Once() + storeMock.On("PublicKeyUpdate", ctx, &expectedKey).Return(nil).Once() + storeMock.On("PublicKeyResolve", ctx, store.PublicKeyFingerprintResolver, "fingerprint", mock.AnythingOfType("store.QueryOption")).Return(updatedKey, nil).Once() }, expected: Expected{&models.PublicKey{ + Fingerprint: "fingerprint", + TenantID: "tenant", PublicKeyFields: models.PublicKeyFields{ Filter: models.PublicKeyFilter{ - Tags: []string{"tag1", "tag2"}, + Taggable: models.Taggable{TagIDs: []string{"tag1_id", "tag2_id"}}, }, }, }, nil}, }, { - description: "Fail update the key when filter is hostname", + description: "successful update the key when filter is hostname", fingerprint: "fingerprint", tenantID: "tenant", keyUpdate: requests.PublicKeyUpdate{ @@ -385,46 +519,40 @@ func TestUpdatePublicKeys(t *testing.T) { }, }, requiredMocks: func() { - model := models.PublicKeyUpdate{ + existingKey := &models.PublicKey{ + Fingerprint: "fingerprint", + TenantID: "tenant", PublicKeyFields: models.PublicKeyFields{ - Filter: models.PublicKeyFilter{ - Hostname: ".*", - }, + Filter: models.PublicKeyFilter{}, }, } - mock.On("PublicKeyUpdate", ctx, "fingerprint", "tenant", &model).Return(nil, errors.New("error", "", 0)).Once() - }, - expected: Expected{nil, errors.New("error", "", 0)}, - }, - { - description: "Successful update the key when filter is tags", - fingerprint: "fingerprint", - tenantID: "tenant", - keyUpdate: requests.PublicKeyUpdate{ - Filter: requests.PublicKeyFilter{ - Hostname: ".*", - }, - }, - requiredMocks: func() { - model := models.PublicKeyUpdate{ - PublicKeyFields: models.PublicKeyFields{ - Filter: models.PublicKeyFilter{ - Hostname: ".*", - }, - }, - } + expectedKey := *existingKey + 
expectedKey.Filter.Hostname = ".*" + expectedKey.Filter.TagIDs = []string{} + expectedKey.Filter.Tags = nil - keyUpdateWithHostnameModel := &models.PublicKey{ + updatedKey := &models.PublicKey{ + Fingerprint: "fingerprint", + TenantID: "tenant", PublicKeyFields: models.PublicKeyFields{ Filter: models.PublicKeyFilter{ Hostname: ".*", }, }, } - mock.On("PublicKeyUpdate", ctx, "fingerprint", "tenant", &model).Return(keyUpdateWithHostnameModel, nil).Once() + + queryOptionsMock. + On("InNamespace", "tenant"). + Return(nil). + Twice() + storeMock.On("PublicKeyResolve", ctx, store.PublicKeyFingerprintResolver, "fingerprint", mock.AnythingOfType("store.QueryOption")).Return(existingKey, nil).Once() + storeMock.On("PublicKeyUpdate", ctx, &expectedKey).Return(nil).Once() + storeMock.On("PublicKeyResolve", ctx, store.PublicKeyFingerprintResolver, "fingerprint", mock.AnythingOfType("store.QueryOption")).Return(updatedKey, nil).Once() }, expected: Expected{&models.PublicKey{ + Fingerprint: "fingerprint", + TenantID: "tenant", PublicKeyFields: models.PublicKeyFields{ Filter: models.PublicKeyFilter{ Hostname: ".*", @@ -443,17 +571,19 @@ func TestUpdatePublicKeys(t *testing.T) { }) } - mock.AssertExpectations(t) + storeMock.AssertExpectations(t) } func TestDeletePublicKeys(t *testing.T) { - mock := new(mocks.Store) + storeMock := new(storemock.Store) + queryOptionsMock := new(storemock.QueryOptions) + storeMock.On("Options").Return(queryOptionsMock) ctx := context.TODO() clockMock.On("Now").Return(now).Twice() - s := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) + s := NewService(store.Store(storeMock), privateKey, publicKey, storecache.NewNullCache(), clientMock) type Expected struct { err error @@ -473,7 +603,7 @@ func TestDeletePublicKeys(t *testing.T) { fingerprint: "fingerprint", tenantID: InvalidTenantID, requiredMocks: func() { - mock.On("NamespaceGet", ctx, InvalidTenantID).Return(nil, errors.New("error", "", 0)).Once() + 
storeMock.On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, InvalidTenantID).Return(nil, errors.New("error", "", 0)).Once() }, expected: Expected{NewErrNamespaceNotFound(InvalidTenantID, errors.New("error", "", 0))}, }, @@ -485,8 +615,12 @@ func TestDeletePublicKeys(t *testing.T) { requiredMocks: func() { namespace := &models.Namespace{TenantID: "tenant1"} - mock.On("NamespaceGet", ctx, namespace.TenantID).Return(namespace, nil).Once() - mock.On("PublicKeyGet", ctx, InvalidFingerprint, namespace.TenantID). + storeMock.On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, namespace.TenantID).Return(namespace, nil).Once() + queryOptionsMock. + On("InNamespace", "tenant1"). + Return(nil). + Once() + storeMock.On("PublicKeyResolve", ctx, store.PublicKeyFingerprintResolver, InvalidFingerprint, mock.AnythingOfType("store.QueryOption")). Return(nil, errors.New("error", "", 0)).Once() }, expected: Expected{NewErrPublicKeyNotFound(InvalidFingerprint, errors.New("error", "", 0))}, @@ -498,39 +632,49 @@ func TestDeletePublicKeys(t *testing.T) { tenantID: "tenant1", requiredMocks: func() { namespace := &models.Namespace{TenantID: "tenant1"} + publicKey := &models.PublicKey{ + Data: []byte("teste"), + Fingerprint: "fingerprint", + CreatedAt: clock.Now(), + TenantID: "tenant1", + PublicKeyFields: models.PublicKeyFields{Name: "teste"}, + } - mock.On("NamespaceGet", ctx, namespace.TenantID).Return(namespace, nil).Once() - mock.On("PublicKeyGet", ctx, "fingerprint", namespace.TenantID). - Return(&models.PublicKey{ - Data: []byte("teste"), - Fingerprint: "fingerprint", - CreatedAt: clock.Now(), - TenantID: "tenant1", - PublicKeyFields: models.PublicKeyFields{Name: "teste"}, - }, nil).Once() - mock.On("PublicKeyDelete", ctx, "fingerprint", "tenant1"). + storeMock.On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, namespace.TenantID).Return(namespace, nil).Once() + queryOptionsMock. + On("InNamespace", "tenant1"). + Return(nil). 
+ Once() + storeMock.On("PublicKeyResolve", ctx, store.PublicKeyFingerprintResolver, "fingerprint", mock.AnythingOfType("store.QueryOption")). + Return(publicKey, nil).Once() + storeMock.On("PublicKeyDelete", ctx, publicKey). Return(errors.New("error", "", 0)).Once() }, expected: Expected{errors.New("error", "", 0)}, }, { - description: "Successful to delete the key", + description: "successful to delete the key", ctx: ctx, fingerprint: "fingerprint", tenantID: "tenant1", requiredMocks: func() { namespace := &models.Namespace{TenantID: "tenant1"} + publicKey := &models.PublicKey{ + Data: []byte("teste"), + Fingerprint: "fingerprint", + CreatedAt: clock.Now(), + TenantID: "tenant1", + PublicKeyFields: models.PublicKeyFields{Name: "teste"}, + } - mock.On("NamespaceGet", ctx, namespace.TenantID).Return(namespace, nil).Once() - mock.On("PublicKeyGet", ctx, "fingerprint", namespace.TenantID). - Return(&models.PublicKey{ - Data: []byte("teste"), - Fingerprint: "fingerprint", - CreatedAt: clock.Now(), - TenantID: "tenant1", - PublicKeyFields: models.PublicKeyFields{Name: "teste"}, - }, nil).Once() - mock.On("PublicKeyDelete", ctx, "fingerprint", "tenant1").Return(nil).Once() + storeMock.On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, namespace.TenantID).Return(namespace, nil).Once() + queryOptionsMock. + On("InNamespace", "tenant1"). + Return(nil). + Once() + storeMock.On("PublicKeyResolve", ctx, store.PublicKeyFingerprintResolver, "fingerprint", mock.AnythingOfType("store.QueryOption")). 
+ Return(publicKey, nil).Once() + storeMock.On("PublicKeyDelete", ctx, publicKey).Return(nil).Once() }, expected: Expected{nil}, }, @@ -545,17 +689,19 @@ func TestDeletePublicKeys(t *testing.T) { }) } - mock.AssertExpectations(t) + storeMock.AssertExpectations(t) } func TestCreatePublicKeys(t *testing.T) { - mock := new(mocks.Store) + storeMock := new(storemock.Store) + queryOptionsMock := new(storemock.QueryOptions) + storeMock.On("Options").Return(queryOptionsMock) ctx := context.TODO() clockMock.On("Now").Return(now) - s := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) + s := NewService(store.Store(storeMock), privateKey, publicKey, storecache.NewNullCache(), clientMock) pubKey, _ := ssh.NewPublicKey(publicKey) @@ -572,23 +718,27 @@ func TestCreatePublicKeys(t *testing.T) { expected Expected }{ { - description: "fail to create the key when filter tags is empty", + description: "fail to create the key when tag list retrieval fails", tenantID: "tenant", req: requests.PublicKeyCreate{ Data: ssh.MarshalAuthorizedKey(pubKey), Fingerprint: ssh.FingerprintLegacyMD5(pubKey), TenantID: "tenant", Filter: requests.PublicKeyFilter{ - Tags: []string{}, + Tags: []string{"tag1"}, }, }, requiredMocks: func() { - mock.On("TagsGet", ctx, "tenant").Return([]string{}, 0, errors.New("error", "", 0)).Once() + queryOptionsMock. + On("InNamespace", "tenant"). + Return(nil). 
+ Once() + storeMock.On("TagList", ctx, mock.AnythingOfType("store.QueryOption")).Return(nil, 0, errors.New("error", "", 0)).Once() }, expected: Expected{nil, NewErrTagEmpty("tenant", errors.New("error", "", 0))}, }, { - description: "fail to create the key when a tags does not exist in a device", + description: "fail to create the key when a tag does not exist", tenantID: "tenant", req: requests.PublicKeyCreate{ Data: ssh.MarshalAuthorizedKey(pubKey), @@ -599,7 +749,15 @@ func TestCreatePublicKeys(t *testing.T) { }, }, requiredMocks: func() { - mock.On("TagsGet", ctx, "tenant").Return([]string{"tag1", "tag4"}, 2, nil).Once() + tags := []models.Tag{ + {ID: "tag1_id", Name: "tag1", TenantID: "tenant"}, + {ID: "tag4_id", Name: "tag4", TenantID: "tenant"}, + } + queryOptionsMock. + On("InNamespace", "tenant"). + Return(nil). + Once() + storeMock.On("TagList", ctx, mock.AnythingOfType("store.QueryOption")).Return(tags, len(tags), nil).Once() }, expected: Expected{nil, NewErrTagNotFound("tag2", nil)}, }, @@ -646,7 +804,11 @@ func TestCreatePublicKeys(t *testing.T) { }, } - mock.On("PublicKeyGet", ctx, keyWithHostname.Fingerprint, "tenant").Return(nil, errors.New("error", "", 0)).Once() + queryOptionsMock. + On("InNamespace", "tenant"). + Return(nil). + Once() + storeMock.On("PublicKeyResolve", ctx, store.PublicKeyFingerprintResolver, keyWithHostname.Fingerprint, mock.AnythingOfType("store.QueryOption")).Return(nil, errors.New("error", "", 0)).Once() }, expected: Expected{nil, NewErrPublicKeyNotFound(requests.PublicKeyCreate{ Data: ssh.MarshalAuthorizedKey(pubKey), @@ -690,7 +852,11 @@ func TestCreatePublicKeys(t *testing.T) { }, } - mock.On("PublicKeyGet", ctx, keyWithHostname.Fingerprint, "tenant").Return(&keyWithHostnameModel, nil).Once() + queryOptionsMock. + On("InNamespace", "tenant"). + Return(nil). 
+ Once() + storeMock.On("PublicKeyResolve", ctx, store.PublicKeyFingerprintResolver, keyWithHostname.Fingerprint, mock.AnythingOfType("store.QueryOption")).Return(&keyWithHostnameModel, nil).Once() }, expected: Expected{nil, NewErrPublicKeyDuplicated([]string{ssh.FingerprintLegacyMD5(pubKey)}, nil)}, }, @@ -714,6 +880,7 @@ func TestCreatePublicKeys(t *testing.T) { PublicKeyFields: models.PublicKeyFields{ Filter: models.PublicKeyFilter{ Hostname: ".*", + Taggable: models.Taggable{TagIDs: []string{}, Tags: nil}, }, }, } @@ -727,8 +894,12 @@ func TestCreatePublicKeys(t *testing.T) { }, } - mock.On("PublicKeyGet", ctx, keyWithHostname.Fingerprint, "tenant").Return(nil, nil).Once() - mock.On("PublicKeyCreate", ctx, &keyWithHostnameModel).Return(errors.New("error", "", 0)).Once() + queryOptionsMock. + On("InNamespace", "tenant"). + Return(nil). + Once() + storeMock.On("PublicKeyResolve", ctx, store.PublicKeyFingerprintResolver, keyWithHostname.Fingerprint, mock.AnythingOfType("store.QueryOption")).Return(nil, store.ErrNoDocuments).Once() + storeMock.On("PublicKeyCreate", ctx, &keyWithHostnameModel).Return("", errors.New("error", "", 0)).Once() }, expected: Expected{nil, errors.New("error", "", 0)}, }, @@ -752,6 +923,7 @@ func TestCreatePublicKeys(t *testing.T) { PublicKeyFields: models.PublicKeyFields{ Filter: models.PublicKeyFilter{ Hostname: ".*", + Taggable: models.Taggable{TagIDs: []string{}, Tags: nil}, }, }, } @@ -765,76 +937,23 @@ func TestCreatePublicKeys(t *testing.T) { }, } - mock.On("PublicKeyGet", ctx, keyWithHostname.Fingerprint, "tenant").Return(nil, nil).Once() - mock.On("PublicKeyCreate", ctx, &keyWithHostnameModel).Return(nil).Once() + queryOptionsMock. + On("InNamespace", "tenant"). + Return(nil). 
+ Once() + storeMock.On("PublicKeyResolve", ctx, store.PublicKeyFingerprintResolver, keyWithHostname.Fingerprint, mock.AnythingOfType("store.QueryOption")).Return(nil, store.ErrNoDocuments).Once() + storeMock.On("PublicKeyCreate", ctx, &keyWithHostnameModel).Return(ssh.FingerprintLegacyMD5(pubKey), nil).Once() }, expected: Expected{&responses.PublicKeyCreate{ - Data: models.PublicKey{ - Data: ssh.MarshalAuthorizedKey(pubKey), - Fingerprint: ssh.FingerprintLegacyMD5(pubKey), - CreatedAt: clock.Now(), - TenantID: "tenant", - PublicKeyFields: models.PublicKeyFields{ - Filter: models.PublicKeyFilter{ - Hostname: ".*", - }, - }, - }.Data, - Filter: responses.PublicKeyFilter(models.PublicKey{ - Data: ssh.MarshalAuthorizedKey(pubKey), - Fingerprint: ssh.FingerprintLegacyMD5(pubKey), - CreatedAt: clock.Now(), - TenantID: "tenant", - PublicKeyFields: models.PublicKeyFields{ - Filter: models.PublicKeyFilter{ - Hostname: ".*", - }, - }, - }.Filter), - Name: models.PublicKey{ - Data: ssh.MarshalAuthorizedKey(pubKey), - Fingerprint: ssh.FingerprintLegacyMD5(pubKey), - CreatedAt: clock.Now(), - TenantID: "tenant", - PublicKeyFields: models.PublicKeyFields{ - Filter: models.PublicKeyFilter{ - Hostname: ".*", - }, - }, - }.Name, - Username: models.PublicKey{ - Data: ssh.MarshalAuthorizedKey(pubKey), - Fingerprint: ssh.FingerprintLegacyMD5(pubKey), - CreatedAt: clock.Now(), - TenantID: "tenant", - PublicKeyFields: models.PublicKeyFields{ - Filter: models.PublicKeyFilter{ - Hostname: ".*", - }, - }, - }.Username, - TenantID: models.PublicKey{ - Data: ssh.MarshalAuthorizedKey(pubKey), - Fingerprint: ssh.FingerprintLegacyMD5(pubKey), - CreatedAt: clock.Now(), - TenantID: "tenant", - PublicKeyFields: models.PublicKeyFields{ - Filter: models.PublicKeyFilter{ - Hostname: ".*", - }, - }, - }.TenantID, - Fingerprint: models.PublicKey{ - Data: ssh.MarshalAuthorizedKey(pubKey), - Fingerprint: ssh.FingerprintLegacyMD5(pubKey), - CreatedAt: clock.Now(), - TenantID: "tenant", - PublicKeyFields: 
models.PublicKeyFields{ - Filter: models.PublicKeyFilter{ - Hostname: ".*", - }, - }, - }.Fingerprint, + Data: ssh.MarshalAuthorizedKey(pubKey), + Filter: responses.PublicKeyFilter{ + Hostname: ".*", + Tags: nil, + }, + Name: "", + Username: "", + TenantID: "tenant", + Fingerprint: ssh.FingerprintLegacyMD5(pubKey), }, nil}, }, { @@ -849,6 +968,11 @@ func TestCreatePublicKeys(t *testing.T) { }, }, requiredMocks: func() { + tags := []models.Tag{ + {ID: "tag1_id", Name: "tag1", TenantID: "tenant"}, + {ID: "tag2_id", Name: "tag2", TenantID: "tenant"}, + } + keyWithTags := requests.PublicKeyCreate{ Data: ssh.MarshalAuthorizedKey(pubKey), Fingerprint: ssh.FingerprintLegacyMD5(pubKey), @@ -865,14 +989,18 @@ func TestCreatePublicKeys(t *testing.T) { TenantID: "tenant", PublicKeyFields: models.PublicKeyFields{ Filter: models.PublicKeyFilter{ - Tags: []string{"tag1", "tag2"}, + Taggable: models.Taggable{TagIDs: []string{"tag1_id", "tag2_id"}, Tags: nil}, }, }, } - mock.On("TagsGet", ctx, keyWithTags.TenantID).Return([]string{"tag1", "tag2"}, 2, nil).Once() - mock.On("PublicKeyGet", ctx, keyWithTags.Fingerprint, "tenant").Return(nil, nil).Once() - mock.On("PublicKeyCreate", ctx, &keyWithTagsModel).Return(errors.New("error", "", 0)).Once() + queryOptionsMock. + On("InNamespace", "tenant"). + Return(nil). 
+ Twice() + storeMock.On("TagList", ctx, mock.AnythingOfType("store.QueryOption")).Return(tags, len(tags), nil).Once() + storeMock.On("PublicKeyResolve", ctx, store.PublicKeyFingerprintResolver, keyWithTags.Fingerprint, mock.AnythingOfType("store.QueryOption")).Return(nil, store.ErrNoDocuments).Once() + storeMock.On("PublicKeyCreate", ctx, &keyWithTagsModel).Return("", errors.New("error", "", 0)).Once() }, expected: Expected{nil, errors.New("error", "", 0)}, }, @@ -888,6 +1016,11 @@ func TestCreatePublicKeys(t *testing.T) { }, }, requiredMocks: func() { + tags := []models.Tag{ + {ID: "tag1_id", Name: "tag1", TenantID: "tenant"}, + {ID: "tag2_id", Name: "tag2", TenantID: "tenant"}, + } + keyWithTags := requests.PublicKeyCreate{ Data: ssh.MarshalAuthorizedKey(pubKey), Fingerprint: ssh.FingerprintLegacyMD5(pubKey), @@ -904,82 +1037,29 @@ func TestCreatePublicKeys(t *testing.T) { TenantID: "tenant", PublicKeyFields: models.PublicKeyFields{ Filter: models.PublicKeyFilter{ - Tags: []string{"tag1", "tag2"}, + Taggable: models.Taggable{TagIDs: []string{"tag1_id", "tag2_id"}, Tags: nil}, }, }, } - mock.On("TagsGet", ctx, keyWithTags.TenantID).Return([]string{"tag1", "tag2"}, 2, nil).Once() - mock.On("PublicKeyGet", ctx, keyWithTags.Fingerprint, "tenant").Return(nil, nil).Once() - mock.On("PublicKeyCreate", ctx, &keyWithTagsModel).Return(nil).Once() + queryOptionsMock. + On("InNamespace", "tenant"). + Return(nil). 
+ Twice() + storeMock.On("TagList", ctx, mock.AnythingOfType("store.QueryOption")).Return(tags, len(tags), nil).Once() + storeMock.On("PublicKeyResolve", ctx, store.PublicKeyFingerprintResolver, keyWithTags.Fingerprint, mock.AnythingOfType("store.QueryOption")).Return(nil, store.ErrNoDocuments).Once() + storeMock.On("PublicKeyCreate", ctx, &keyWithTagsModel).Return(ssh.FingerprintLegacyMD5(pubKey), nil).Once() }, expected: Expected{&responses.PublicKeyCreate{ - Data: models.PublicKey{ - Data: ssh.MarshalAuthorizedKey(pubKey), - Fingerprint: ssh.FingerprintLegacyMD5(pubKey), - CreatedAt: clock.Now(), - TenantID: "tenant", - PublicKeyFields: models.PublicKeyFields{ - Filter: models.PublicKeyFilter{ - Tags: []string{"tag1", "tag2"}, - }, - }, - }.Data, - Filter: responses.PublicKeyFilter(models.PublicKey{ - Data: ssh.MarshalAuthorizedKey(pubKey), - Fingerprint: ssh.FingerprintLegacyMD5(pubKey), - CreatedAt: clock.Now(), - TenantID: "tenant", - PublicKeyFields: models.PublicKeyFields{ - Filter: models.PublicKeyFilter{ - Tags: []string{"tag1", "tag2"}, - }, - }, - }.Filter), - Name: models.PublicKey{ - Data: ssh.MarshalAuthorizedKey(pubKey), - Fingerprint: ssh.FingerprintLegacyMD5(pubKey), - CreatedAt: clock.Now(), - TenantID: "tenant", - PublicKeyFields: models.PublicKeyFields{ - Filter: models.PublicKeyFilter{ - Tags: []string{"tag1", "tag2"}, - }, - }, - }.Name, - Username: models.PublicKey{ - Data: ssh.MarshalAuthorizedKey(pubKey), - Fingerprint: ssh.FingerprintLegacyMD5(pubKey), - CreatedAt: clock.Now(), - TenantID: "tenant", - PublicKeyFields: models.PublicKeyFields{ - Filter: models.PublicKeyFilter{ - Tags: []string{"tag1", "tag2"}, - }, - }, - }.Username, - TenantID: models.PublicKey{ - Data: ssh.MarshalAuthorizedKey(pubKey), - Fingerprint: ssh.FingerprintLegacyMD5(pubKey), - CreatedAt: clock.Now(), - TenantID: "tenant", - PublicKeyFields: models.PublicKeyFields{ - Filter: models.PublicKeyFilter{ - Tags: []string{"tag1", "tag2"}, - }, - }, - }.TenantID, - 
Fingerprint: models.PublicKey{ - Data: ssh.MarshalAuthorizedKey(pubKey), - Fingerprint: ssh.FingerprintLegacyMD5(pubKey), - CreatedAt: clock.Now(), - TenantID: "tenant", - PublicKeyFields: models.PublicKeyFields{ - Filter: models.PublicKeyFilter{ - Tags: []string{"tag1", "tag2"}, - }, - }, - }.Fingerprint, + Data: ssh.MarshalAuthorizedKey(pubKey), + Filter: responses.PublicKeyFilter{ + Hostname: "", + Tags: []string{"tag1", "tag2"}, + }, + Name: "", + Username: "", + TenantID: "tenant", + Fingerprint: ssh.FingerprintLegacyMD5(pubKey), }, nil}, }, } @@ -993,5 +1073,5 @@ func TestCreatePublicKeys(t *testing.T) { }) } - mock.AssertExpectations(t) + storeMock.AssertExpectations(t) } diff --git a/api/services/stats.go b/api/services/stats.go index 4a46e8535a2..0cbdbb1c689 100644 --- a/api/services/stats.go +++ b/api/services/stats.go @@ -3,13 +3,14 @@ package services import ( "context" + "github.com/shellhub-io/shellhub/pkg/api/requests" "github.com/shellhub-io/shellhub/pkg/models" ) type StatsService interface { - GetStats(ctx context.Context) (*models.Stats, error) + GetStats(ctx context.Context, req *requests.GetStats) (*models.Stats, error) } -func (s *service) GetStats(ctx context.Context) (*models.Stats, error) { - return s.store.GetStats(ctx) +func (s *service) GetStats(ctx context.Context, req *requests.GetStats) (*models.Stats, error) { + return s.store.GetStats(ctx, req.TenantID) } diff --git a/api/services/stats_test.go b/api/services/stats_test.go new file mode 100644 index 00000000000..8eeaf6c6b48 --- /dev/null +++ b/api/services/stats_test.go @@ -0,0 +1,114 @@ +package services + +import ( + "context" + "errors" + "testing" + + "github.com/shellhub-io/shellhub/api/store/mocks" + "github.com/shellhub-io/shellhub/pkg/api/requests" + storecache "github.com/shellhub-io/shellhub/pkg/cache" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/stretchr/testify/assert" + gomock "github.com/stretchr/testify/mock" +) + +func TestGetStats(t *testing.T) { + 
storeMock := &mocks.Store{} + + ctx := context.Background() + + cases := []struct { + description string + req *requests.GetStats + expectedStats *models.Stats + expectedError error + requiredMocks func() + }{ + { + description: "fail when store returns error", + req: &requests.GetStats{ + TenantID: "00000000-0000-4000-0000-000000000000", + }, + expectedStats: nil, + expectedError: errors.New("store error"), + requiredMocks: func() { + storeMock. + On("GetStats", gomock.Anything, "00000000-0000-4000-0000-000000000000"). + Return(nil, errors.New("store error")). + Once() + }, + }, + { + description: "success when getting stats without tenantID", + req: &requests.GetStats{ + TenantID: "", + }, + expectedStats: &models.Stats{ + RegisteredDevices: 10, + OnlineDevices: 5, + ActiveSessions: 15, + PendingDevices: 2, + RejectedDevices: 1, + }, + expectedError: nil, + requiredMocks: func() { + storeMock.On("GetStats", gomock.Anything, ""). + Return( + &models.Stats{ + RegisteredDevices: 10, + OnlineDevices: 5, + ActiveSessions: 15, + PendingDevices: 2, + RejectedDevices: 1, + }, + nil, + ). + Once() + }, + }, + { + description: "success when getting stats with tenantID", + req: &requests.GetStats{ + TenantID: "00000000-0000-4000-0000-000000000000", + }, + expectedStats: &models.Stats{ + RegisteredDevices: 3, + OnlineDevices: 2, + ActiveSessions: 5, + PendingDevices: 1, + RejectedDevices: 0, + }, + expectedError: nil, + requiredMocks: func() { + storeMock. + On("GetStats", gomock.Anything, "00000000-0000-4000-0000-000000000000"). + Return( + &models.Stats{ + RegisteredDevices: 3, + OnlineDevices: 2, + ActiveSessions: 5, + PendingDevices: 1, + RejectedDevices: 0, + }, + nil, + ). 
+ Once() + }, + }, + } + + s := NewService(storeMock, privateKey, publicKey, storecache.NewNullCache(), clientMock) + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + tc.requiredMocks() + + stats, err := s.GetStats(ctx, tc.req) + assert.Equal(t, tc.expectedStats, stats) + assert.Equal(t, tc.expectedError, err) + }) + } + + storeMock.AssertExpectations(t) +} diff --git a/api/services/system.go b/api/services/system.go index a7ae7540ec6..fad8496de8b 100644 --- a/api/services/system.go +++ b/api/services/system.go @@ -3,56 +3,57 @@ package services import ( "context" "fmt" + "os" "strings" - "text/template" + "github.com/shellhub-io/shellhub/api/pkg/responses" "github.com/shellhub-io/shellhub/pkg/api/requests" "github.com/shellhub-io/shellhub/pkg/envs" - "github.com/shellhub-io/shellhub/pkg/models" ) type SystemService interface { - SystemGetInfo(ctx context.Context, req requests.SystemGetInfo) (*models.SystemInfo, error) - SystemDownloadInstallScript(ctx context.Context, req requests.SystemInstallScript) (*template.Template, map[string]interface{}, error) + // GetSystemInfo retrieves the instance's information + GetSystemInfo(ctx context.Context, req *requests.GetSystemInfo) (*responses.SystemInfo, error) + + SystemDownloadInstallScript(ctx context.Context) (string, error) } -// SystemGetInfo returns system instance information. -// It receives a context (ctx) and requests.SystemGetInfo, what contains a host (host) which is used to determine the -// API and SSH host of the system, and a port (port) that can be specified to override the API port from the host. 
-func (s *service) SystemGetInfo(_ context.Context, req requests.SystemGetInfo) (*models.SystemInfo, error) { +func (s *service) GetSystemInfo(ctx context.Context, req *requests.GetSystemInfo) (*responses.SystemInfo, error) { + system, err := s.store.SystemGet(ctx) + if err != nil { + return nil, err + } + apiHost := strings.Split(req.Host, ":")[0] sshPort := envs.DefaultBackend.Get("SHELLHUB_SSH_PORT") - info := &models.SystemInfo{ + resp := &responses.SystemInfo{ Version: envs.DefaultBackend.Get("SHELLHUB_VERSION"), - Endpoints: &models.SystemInfoEndpoints{ + Setup: system.Setup, + Endpoints: &responses.SystemEndpointsInfo{ API: apiHost, SSH: fmt.Sprintf("%s:%s", apiHost, sshPort), }, + Authentication: &responses.SystemAuthenticationInfo{ + Local: system.Authentication.Local.Enabled, + SAML: system.Authentication.SAML.Enabled, + }, } if req.Port > 0 { - info.Endpoints.API = fmt.Sprintf("%s:%d", apiHost, req.Port) + resp.Endpoints.API = fmt.Sprintf("%s:%d", apiHost, req.Port) } else { - info.Endpoints.API = req.Host + resp.Endpoints.API = req.Host } - return info, nil + return resp, nil } -func (s *service) SystemDownloadInstallScript(_ context.Context, req requests.SystemInstallScript) (*template.Template, map[string]interface{}, error) { - tmpl, err := template.ParseFiles("./templates/kickstart.sh") +func (s *service) SystemDownloadInstallScript(_ context.Context) (string, error) { + data, err := os.ReadFile("/templates/install.sh") if err != nil { - return nil, nil, err + return "", err } - return tmpl, map[string]interface{}{ - "scheme": req.Scheme, - "host": req.Host, - "tenant_id": req.TenantID, - "keepalive_interval": req.KeepAliveInternavel, - "preferred_hostname": req.PreferredHostname, - "preferred_identity": req.PreferredIdentity, - "version": envs.DefaultBackend.Get("SHELLHUB_VERSION"), - }, nil + return string(data), nil } diff --git a/api/services/tags.go b/api/services/tags.go index d43ddafcaf8..19af3b392db 100644 --- a/api/services/tags.go +++ 
b/api/services/tags.go @@ -2,68 +2,173 @@ package services import ( "context" + "strings" + "github.com/shellhub-io/shellhub/api/store" + "github.com/shellhub-io/shellhub/pkg/api/query" + "github.com/shellhub-io/shellhub/pkg/api/requests" "github.com/shellhub-io/shellhub/pkg/models" ) type TagsService interface { - GetTags(ctx context.Context, tenant string) ([]string, int, error) - RenameTag(ctx context.Context, tenant string, oldTag string, newTag string) error - DeleteTag(ctx context.Context, tenant string, tag string) error + // CreateTag creates a new tag in the specified tenant namespace. + // + // Tags can share the same attributes (e.g. name) if they belong to different tenants. + // For example, tenant1 and tenant2 can each have a tag named "production". + // + // It returns the insertedID, an array of conflicting field names, e.g. `["name"]` and an error if any. + CreateTag(ctx context.Context, req *requests.CreateTag) (insertedID string, conflicts []string, err error) + + // PushTagTo adds an existing tag in a namespace to a target document (e.g. Device, PublicKey, FirewallRule). + // + // Returns ErrNamespaceNotFound if namespace not found, ErrTagNotFound if tag not found, or other errors if operation fails. + PushTagTo(ctx context.Context, target store.TagTarget, req *requests.PushTag) (err error) + + // PullTagFrom removes a tag from a target document in a namespace. The tag itself is not deleted. + // If no other documents reference the tag, it becomes orphaned but remains available for future use. + // + // Returns ErrNamespaceNotFound if namespace not found, ErrTagNotFound if tag not found, or other errors if operation fails. + PullTagFrom(ctx context.Context, target store.TagTarget, req *requests.PullTag) (err error) + + // ListTags retrieves a batch of tags that belong to the given namespace. 
+ // + // It returns the list of tags with pagination, an integer representing the total count of tags in the + // database, ignoring pagination, and an error if any. + ListTags(ctx context.Context, req *requests.ListTags) (tags []models.Tag, totalCount int, err error) + + // UpdateTag updates a tag with the specified name in the specified namespace. + // + // It returns an array of conflicting field names, e.g. `["name"]` and an error if any. + UpdateTag(ctx context.Context, req *requests.UpdateTag) (conflicts []string, err error) + + // DeleteTag deletes a tag with the specified name in the specified namespace. + // + // It returns an error if any. + DeleteTag(ctx context.Context, req *requests.DeleteTag) (err error) } -func (s *service) GetTags(ctx context.Context, tenant string) ([]string, int, error) { - namespace, err := s.store.NamespaceGet(ctx, tenant) - if err != nil || namespace == nil { - return nil, 0, NewErrNamespaceNotFound(tenant, err) +func (s *service) CreateTag(ctx context.Context, req *requests.CreateTag) (string, []string, error) { + if _, err := s.store.NamespaceResolve(ctx, store.NamespaceTenantIDResolver, req.TenantID); err != nil { + return "", []string{}, NewErrNamespaceNotFound(req.TenantID, err) } - return s.store.TagsGet(ctx, namespace.TenantID) + if conflicts, has, err := s.store.TagConflicts(ctx, req.TenantID, &models.TagConflicts{Name: req.Name}); has || err != nil { + return "", conflicts, err + } + + insertedID, err := s.store.TagCreate(ctx, &models.Tag{Name: req.Name, TenantID: req.TenantID}) + if err != nil { + return "", []string{}, err + } + + return insertedID, []string{}, nil +} + +func (s *service) PushTagTo(ctx context.Context, target store.TagTarget, req *requests.PushTag) (err error) { + if _, err := s.store.NamespaceResolve(ctx, store.NamespaceTenantIDResolver, req.TenantID); err != nil { + return NewErrNamespaceNotFound(req.TenantID, err) + } + + tag, err := s.store.TagResolve(ctx, store.TagNameResolver, req.Name, 
s.store.Options().InNamespace(req.TenantID)) + if err != nil { + return NewErrTagNotFound(req.Name, err) + } + + return s.store.TagPushToTarget(ctx, tag.ID, target, req.TargetID) +} + +func (s *service) PullTagFrom(ctx context.Context, target store.TagTarget, req *requests.PullTag) (err error) { + if _, err := s.store.NamespaceResolve(ctx, store.NamespaceTenantIDResolver, req.TenantID); err != nil { + return NewErrNamespaceNotFound(req.TenantID, err) + } + + tag, err := s.store.TagResolve(ctx, store.TagNameResolver, req.Name, s.store.Options().InNamespace(req.TenantID)) + if err != nil { + return NewErrTagNotFound(req.Name, err) + } + + return s.store.TagPullFromTarget(ctx, tag.ID, target, req.TargetID) } -func (s *service) RenameTag(ctx context.Context, tenant string, oldTag string, newTag string) error { - if ok, err := s.validator.Struct(models.NewDeviceTag(newTag)); !ok || err != nil { - return NewErrTagInvalid(newTag, err) +func (s *service) ListTags(ctx context.Context, req *requests.ListTags) ([]models.Tag, int, error) { + if _, err := s.store.NamespaceResolve(ctx, store.NamespaceTenantIDResolver, req.TenantID); err != nil { + return []models.Tag{}, 0, NewErrNamespaceNotFound(req.TenantID, err) } - tags, count, err := s.store.TagsGet(ctx, tenant) - if err != nil || count == 0 { - return NewErrTagEmpty(tenant, err) + if req.Sorter.By == "" { + req.Sorter.By = "created_at" } - if !contains(tags, oldTag) { - return NewErrTagNotFound(oldTag, nil) + if req.Sorter.Order == "" { + req.Sorter.Order = query.OrderDesc } - if contains(tags, newTag) { - return NewErrTagDuplicated(newTag, nil) + opts := []store.QueryOption{ + s.store.Options().InNamespace(req.TenantID), + s.store.Options().Match(&req.Filters), + s.store.Options().Sort(&req.Sorter), + s.store.Options().Paginate(&req.Paginator), } - _, err = s.store.TagsRename(ctx, tenant, oldTag, newTag) + tags, totalCount, err := s.store.TagList(ctx, opts...) 
+ if err != nil { + return []models.Tag{}, 0, err + } - return err + return tags, totalCount, nil } -func (s *service) DeleteTag(ctx context.Context, tenant string, tag string) error { - if ok, err := s.validator.Struct(models.NewDeviceTag(tag)); !ok || err != nil { - return NewErrTagInvalid(tag, err) +func (s *service) UpdateTag(ctx context.Context, req *requests.UpdateTag) ([]string, error) { + if _, err := s.store.NamespaceResolve(ctx, store.NamespaceTenantIDResolver, req.TenantID); err != nil { + return []string{}, NewErrNamespaceNotFound(req.TenantID, err) + } + + tag, err := s.store.TagResolve(ctx, store.TagNameResolver, req.Name, s.store.Options().InNamespace(req.TenantID)) + if err != nil { + return []string{}, NewErrTagNotFound(req.Name, err) + } + + conflictsAttrs := &models.TagConflicts{} + if req.NewName != "" && req.NewName != req.Name { + conflictsAttrs.Name = req.NewName } - namespace, err := s.store.NamespaceGet(ctx, tenant) - if err != nil || namespace == nil { - return NewErrNamespaceNotFound(tenant, err) + if conflicts, has, err := s.store.TagConflicts(ctx, req.TenantID, conflictsAttrs); has || err != nil { + return conflicts, NewErrTagDuplicated(req.NewName, err) } - tags, count, err := s.store.TagsGet(ctx, namespace.TenantID) - if err != nil || count == 0 { - return NewErrTagEmpty(tenant, err) + if req.NewName != "" && !strings.EqualFold(req.NewName, tag.Name) { + tag.Name = req.NewName } - if !contains(tags, tag) { - return NewErrTagNotFound(tag, nil) + if err := s.store.TagUpdate(ctx, tag); err != nil { + return nil, err } - _, err = s.store.TagsDelete(ctx, namespace.TenantID, tag) + return []string{}, nil +} + +func (s *service) DeleteTag(ctx context.Context, req *requests.DeleteTag) error { + return s.store.WithTransaction(ctx, s.deleteTagCallback(req)) +} + +func (s *service) deleteTagCallback(req *requests.DeleteTag) store.TransactionCb { + return func(ctx context.Context) error { + if _, err := s.store.NamespaceResolve(ctx, 
store.NamespaceTenantIDResolver, req.TenantID); err != nil { + return NewErrNamespaceNotFound(req.TenantID, err) + } - return err + tag, err := s.store.TagResolve(ctx, store.TagNameResolver, req.Name, s.store.Options().InNamespace(req.TenantID)) + if err != nil { + return NewErrTagNotFound(req.Name, err) + } + + for _, target := range store.TagTargets() { + if err := s.store.TagPullFromTarget(ctx, tag.ID, target); err != nil { + return err + } + } + + return s.store.TagDelete(ctx, tag) + } } diff --git a/api/services/tags_test.go b/api/services/tags_test.go index a63e3b6ed11..ba84ec8be42 100644 --- a/api/services/tags_test.go +++ b/api/services/tags_test.go @@ -2,357 +2,885 @@ package services import ( "context" + "errors" "testing" "github.com/shellhub-io/shellhub/api/store" - "github.com/shellhub-io/shellhub/api/store/mocks" - storecache "github.com/shellhub-io/shellhub/pkg/cache" - "github.com/shellhub-io/shellhub/pkg/errors" - mocksGeoIp "github.com/shellhub-io/shellhub/pkg/geoip/mocks" + storemock "github.com/shellhub-io/shellhub/api/store/mocks" + "github.com/shellhub-io/shellhub/pkg/api/query" + "github.com/shellhub-io/shellhub/pkg/api/requests" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/shellhub-io/shellhub/pkg/validator" - "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" ) -func TestGetTags(t *testing.T) { - mock := new(mocks.Store) - +func TestService_CreateTag(t *testing.T) { + storeMock := new(storemock.Store) ctx := context.TODO() type Expected struct { - Tags []string - Count int - Error error + insertedID string + conflicts []string + err error } cases := []struct { - name string - uid models.UID - tenantID string + description string + req *requests.CreateTag requiredMocks func() expected Expected }{ { - name: "fail when namespace is not found", - tenantID: "not_found_tenant", + description: "fails when namespace not found", + req: &requests.CreateTag{ + Name: 
"production", + TenantID: "tenant1", + }, requiredMocks: func() { - mock.On("NamespaceGet", ctx, "not_found_tenant").Return(nil, errors.New("error", "", 0)).Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "tenant1"). + Return(nil, errors.New("error")). + Once() }, expected: Expected{ - Tags: nil, - Count: 0, - Error: NewErrNamespaceNotFound("not_found_tenant", errors.New("error", "", 0)), + insertedID: "", + conflicts: []string{}, + err: NewErrNamespaceNotFound("tenant1", errors.New("error")), }, }, { - name: "fail when store function to get tags fails", - tenantID: "tenant", + description: "fails when tag name conflicts", + req: &requests.CreateTag{ + Name: "production", + TenantID: "tenant1", + }, requiredMocks: func() { - namespace := &models.Namespace{Name: "namespace", TenantID: "tenant"} - - mock.On("NamespaceGet", ctx, "tenant").Return(namespace, nil).Once() - mock.On("TagsGet", ctx, "tenant").Return(nil, 0, errors.New("error", "", 0)).Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "tenant1"). + Return(&models.Namespace{}, nil). + Once() + storeMock. + On("TagConflicts", ctx, "tenant1", &models.TagConflicts{Name: "production"}). + Return([]string{"name"}, true, nil). 
+ Once() }, expected: Expected{ - Tags: nil, - Count: 0, - Error: errors.New("error", "", 0), + insertedID: "", + conflicts: []string{"name"}, + err: nil, }, }, { - name: "success to get tags", - tenantID: "tenant", + description: "fails when tag create fails", + req: &requests.CreateTag{ + Name: "production", + TenantID: "tenant1", + }, requiredMocks: func() { - device := &models.Device{ - UID: "uid", - Namespace: "namespace", - TenantID: "tenant", - Tags: []string{"device1", "device2"}, - } - - namespace := &models.Namespace{Name: "namespace", TenantID: "tenant"} - - mock.On("NamespaceGet", ctx, "tenant").Return(namespace, nil).Once() - mock.On("TagsGet", ctx, "tenant").Return(device.Tags, len(device.Tags), nil).Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "tenant1"). + Return(&models.Namespace{}, nil). + Once() + storeMock. + On("TagConflicts", ctx, "tenant1", &models.TagConflicts{Name: "production"}). + Return([]string{}, false, nil). + Once() + storeMock. + On("TagCreate", ctx, &models.Tag{Name: "production", TenantID: "tenant1"}). + Return("", errors.New("error")). + Once() + }, + expected: Expected{ + insertedID: "", + conflicts: []string{}, + err: errors.New("error"), + }, + }, + { + description: "succeeds creating tag", + req: &requests.CreateTag{ + Name: "production", + TenantID: "tenant1", + }, + requiredMocks: func() { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "tenant1"). + Return(&models.Namespace{}, nil). + Once() + storeMock. + On("TagConflicts", ctx, "tenant1", &models.TagConflicts{Name: "production"}). + Return([]string{}, false, nil). + Once() + storeMock. + On("TagCreate", ctx, &models.Tag{Name: "production", TenantID: "tenant1"}). + Return("000000000000000000000000", nil). 
+ Once() }, expected: Expected{ - Tags: []string{"device1", "device2"}, - Count: len([]string{"device1", "device2"}), - Error: nil, + insertedID: "000000000000000000000000", + conflicts: []string{}, + err: nil, }, }, } + service := NewService(storeMock, privateKey, publicKey, nil, nil) + for _, tc := range cases { - t.Run(tc.name, func(t *testing.T) { + t.Run(tc.description, func(t *testing.T) { tc.requiredMocks() - locator := &mocksGeoIp.Locator{} - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, locator) - - tags, count, err := service.GetTags(ctx, tc.tenantID) - assert.Equal(t, tc.expected, Expected{tags, count, err}) + insertedID, conflicts, err := service.CreateTag(ctx, tc.req) + require.Equal(t, tc.expected, Expected{insertedID, conflicts, err}) }) } - mock.AssertExpectations(t) + storeMock.AssertExpectations(t) } -func TestRenameTag(t *testing.T) { - mock := new(mocks.Store) +func TestService_PushTagTo(t *testing.T) { + storeMock := new(storemock.Store) + queryOptionsMock := new(storemock.QueryOptions) + storeMock.On("Options").Return(queryOptionsMock) ctx := context.TODO() cases := []struct { - name string - tenantID string - currentTag string - newTag string + description string + target store.TagTarget + req *requests.PushTag requiredMocks func() expected error }{ { - name: "fail when tag is invalid", - tenantID: "tenant", - currentTag: "currentTag", - newTag: "invalid_tag", - requiredMocks: func() {}, - expected: NewErrTagInvalid("invalid_tag", validator.ErrStructureInvalid), + description: "fails when namespace not found", + target: store.TagTargetDevice, + req: &requests.PushTag{ + Name: "production", + TenantID: "tenant1", + TargetID: "device_00000000-0000-4000-0000-000000000000", + }, + requiredMocks: func() { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "tenant1"). + Return(nil, errors.New("error")). 
+ Once() + }, + expected: NewErrNamespaceNotFound("tenant1", errors.New("error")), }, { - name: "fail when device has no tags", - tenantID: "namespaceTenantIDNoTag", - currentTag: "device3", - newTag: "device1", + description: "fails when tag not found", + target: store.TagTargetDevice, + req: &requests.PushTag{ + Name: "production", + TenantID: "tenant1", + TargetID: "device_00000000-0000-4000-0000-000000000000", + }, requiredMocks: func() { - mock.On("TagsGet", ctx, "namespaceTenantIDNoTag").Return(nil, 0, errors.New("error", "", 0)) + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "tenant1"). + Return(&models.Namespace{}, nil). + Once() + queryOptionsMock. + On("InNamespace", "tenant1"). + Return(nil). + Once() + storeMock. + On("TagResolve", ctx, store.TagNameResolver, "production", mock.AnythingOfType("store.QueryOption")). + Return(nil, errors.New("error")). + Once() }, - expected: NewErrTagEmpty("namespaceTenantIDNoTag", errors.New("error", "", 0)), + expected: NewErrTagNotFound("production", errors.New("error")), }, { - name: "fail when device don't have the tag", - tenantID: "namespaceTenantID", - currentTag: "device2", - newTag: "device1", + description: "fails when tag push fails", + target: store.TagTargetDevice, + req: &requests.PushTag{ + Name: "production", + TenantID: "tenant1", + TargetID: "device_00000000-0000-4000-0000-000000000000", + }, requiredMocks: func() { - namespace := &models.Namespace{ - Name: "namespaceName", - Owner: "owner", - TenantID: "namespaceTenantID", - } - - deviceWithTags := &models.Device{ - UID: "deviceWithTagsUID", - Name: "deviceWithTagsName", - TenantID: "deviceWithTagsTenantID", - Tags: []string{"device3", "device4", "device5"}, - } - - mock.On("TagsGet", ctx, namespace.TenantID).Return(deviceWithTags.Tags, len(deviceWithTags.Tags), nil).Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "tenant1"). + Return(&models.Namespace{}, nil). 
+ Once() + queryOptionsMock. + On("InNamespace", "tenant1"). + Return(nil). + Once() + storeMock. + On("TagResolve", ctx, store.TagNameResolver, "production", mock.AnythingOfType("store.QueryOption")). + Return(&models.Tag{ID: "tag_00000000-0000-4000-0000-000000000000"}, nil). + Once() + storeMock. + On("TagPushToTarget", ctx, "tag_00000000-0000-4000-0000-000000000000", store.TagTargetDevice, "device_00000000-0000-4000-0000-000000000000"). + Return(errors.New("error")). + Once() }, - expected: NewErrTagNotFound("device2", nil), + expected: errors.New("error"), }, { - name: "fail when device already have the tag", - tenantID: "namespaceTenantID", - currentTag: "device3", - newTag: "device5", + description: "succeeds pushing tag", + target: store.TagTargetDevice, + req: &requests.PushTag{ + Name: "production", + TenantID: "tenant1", + TargetID: "device_00000000-0000-4000-0000-000000000000", + }, requiredMocks: func() { - namespace := &models.Namespace{ - Name: "namespaceName", - Owner: "owner", - TenantID: "namespaceTenantID", - } + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "tenant1"). + Return(&models.Namespace{}, nil). + Once() + queryOptionsMock. + On("InNamespace", "tenant1"). + Return(nil). + Once() + storeMock. + On("TagResolve", ctx, store.TagNameResolver, "production", mock.AnythingOfType("store.QueryOption")). + Return(&models.Tag{ID: "tag_00000000-0000-4000-0000-000000000000"}, nil). + Once() + storeMock. + On("TagPushToTarget", ctx, "tag_00000000-0000-4000-0000-000000000000", store.TagTargetDevice, "device_00000000-0000-4000-0000-000000000000"). + Return(nil). 
+ Once() + }, + expected: nil, + }, + } - deviceWithTags := &models.Device{ - UID: "deviceWithTagsUID", - Name: "deviceWithTagsName", - TenantID: "deviceWithTagsTenantID", - Tags: []string{"device3", "device4", "device5"}, - } + service := NewService(storeMock, privateKey, publicKey, nil, nil) + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + tc.requiredMocks() + + err := service.PushTagTo(ctx, tc.target, tc.req) + require.Equal(t, tc.expected, err) + }) + } + + storeMock.AssertExpectations(t) +} + +func TestService_PullTagFrom(t *testing.T) { + storeMock := new(storemock.Store) + queryOptionsMock := new(storemock.QueryOptions) + storeMock.On("Options").Return(queryOptionsMock) + + ctx := context.TODO() - mock.On("TagsGet", ctx, namespace.TenantID).Return(deviceWithTags.Tags, len(deviceWithTags.Tags), nil).Once() + cases := []struct { + description string + target store.TagTarget + req *requests.PullTag + requiredMocks func() + expected error + }{ + { + description: "fails when namespace not found", + target: store.TagTargetDevice, + req: &requests.PullTag{ + Name: "production", + TenantID: "tenant1", + TargetID: "device_00000000-0000-4000-0000-000000000000", }, - expected: NewErrTagDuplicated("device5", nil), + requiredMocks: func() { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "tenant1"). + Return(nil, errors.New("error")). 
+ Once() + }, + expected: NewErrNamespaceNotFound("tenant1", errors.New("error")), }, { - name: "fail when the store function to rename the tag fails", - tenantID: "namespaceTenantID", - currentTag: "device3", - newTag: "device1", + description: "fails when tag not found", + target: store.TagTargetDevice, + req: &requests.PullTag{ + Name: "production", + TenantID: "tenant1", + TargetID: "device_00000000-0000-4000-0000-000000000000", + }, requiredMocks: func() { - namespace := &models.Namespace{ - Name: "namespaceName", - Owner: "owner", - TenantID: "namespaceTenantID", - } - - deviceWithTags := &models.Device{ - UID: "deviceWithTagsUID", - Name: "deviceWithTagsName", - TenantID: "deviceWithTagsTenantID", - Tags: []string{"device3", "device4", "device5"}, - } - - mock.On("TagsGet", ctx, namespace.TenantID).Return(deviceWithTags.Tags, len(deviceWithTags.Tags), nil).Once() - mock.On("TagsRename", ctx, namespace.TenantID, "device3", "device1").Return(int64(0), errors.New("error", "", 0)).Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "tenant1"). + Return(&models.Namespace{}, nil). + Once() + queryOptionsMock. + On("InNamespace", "tenant1"). + Return(nil). + Once() + storeMock. + On("TagResolve", ctx, store.TagNameResolver, "production", mock.AnythingOfType("store.QueryOption")). + Return(nil, errors.New("error")). 
+ Once() }, - expected: errors.New("error", "", 0), + expected: NewErrTagNotFound("production", errors.New("error")), }, { - name: "success to rename the tag", - tenantID: "namespaceTenantID", - currentTag: "device3", - newTag: "device1", + description: "fails when tag pull fails", + target: store.TagTargetDevice, + req: &requests.PullTag{ + Name: "production", + TenantID: "tenant1", + TargetID: "device_00000000-0000-4000-0000-000000000000", + }, requiredMocks: func() { - namespace := &models.Namespace{ - Name: "namespaceName", - Owner: "owner", - TenantID: "namespaceTenantID", - } - - deviceWithTags := &models.Device{ - UID: "deviceWithTagsUID", - Name: "deviceWithTagsName", - TenantID: "deviceWithTagsTenantID", - Tags: []string{"device3", "device4", "device5"}, - } - - mock.On("TagsGet", ctx, namespace.TenantID).Return(deviceWithTags.Tags, len(deviceWithTags.Tags), nil).Once() - mock.On("TagsRename", ctx, namespace.TenantID, "device3", "device1").Return(int64(1), nil).Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "tenant1"). + Return(&models.Namespace{}, nil). + Once() + queryOptionsMock. + On("InNamespace", "tenant1"). + Return(nil). + Once() + storeMock. + On("TagResolve", ctx, store.TagNameResolver, "production", mock.AnythingOfType("store.QueryOption")). + Return(&models.Tag{ID: "tag_00000000-0000-4000-0000-000000000000"}, nil). + Once() + storeMock. + On("TagPullFromTarget", ctx, "tag_00000000-0000-4000-0000-000000000000", store.TagTargetDevice, "device_00000000-0000-4000-0000-000000000000"). + Return(errors.New("error")). + Once() + }, + expected: errors.New("error"), + }, + { + description: "succeeds pulling tag", + target: store.TagTargetDevice, + req: &requests.PullTag{ + Name: "production", + TenantID: "tenant1", + TargetID: "device_00000000-0000-4000-0000-000000000000", + }, + requiredMocks: func() { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "tenant1"). 
+ Return(&models.Namespace{}, nil). + Once() + queryOptionsMock. + On("InNamespace", "tenant1"). + Return(nil). + Once() + storeMock. + On("TagResolve", ctx, store.TagNameResolver, "production", mock.AnythingOfType("store.QueryOption")). + Return(&models.Tag{ID: "tag_00000000-0000-4000-0000-000000000000"}, nil). + Once() + storeMock. + On("TagPullFromTarget", ctx, "tag_00000000-0000-4000-0000-000000000000", store.TagTargetDevice, "device_00000000-0000-4000-0000-000000000000"). + Return(nil). + Once() }, expected: nil, }, } + service := NewService(storeMock, privateKey, publicKey, nil, nil) + for _, tc := range cases { - t.Run(tc.name, func(t *testing.T) { + t.Run(tc.description, func(t *testing.T) { tc.requiredMocks() - locator := &mocksGeoIp.Locator{} - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, locator) - - err := service.RenameTag(ctx, tc.tenantID, tc.currentTag, tc.newTag) - assert.Equal(t, tc.expected, err) + err := service.PullTagFrom(ctx, tc.target, tc.req) + require.Equal(t, tc.expected, err) }) } - mock.AssertExpectations(t) + storeMock.AssertExpectations(t) } -func TestDeleteTag(t *testing.T) { - mock := new(mocks.Store) +func TestService_ListTags(t *testing.T) { + storeMock := new(storemock.Store) + queryOptionsMock := new(storemock.QueryOptions) + storeMock.On("Options").Return(queryOptionsMock) ctx := context.TODO() + type Expected struct { + tags []models.Tag + totalCount int + err error + } + cases := []struct { - name string - tag string - tenant string + description string + req *requests.ListTags requiredMocks func() - expected error + expected Expected }{ { - name: "fail when tag is invalid", - tag: "invalid_tag", - tenant: "tenant", + description: "fails when namespace not found", + req: &requests.ListTags{ + TenantID: "tenant1", + Paginator: query.Paginator{ + Page: 1, + PerPage: 10, + }, + }, requiredMocks: func() { + storeMock. 
+ On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "tenant1"). + Return(nil, errors.New("error")). + Once() + }, + expected: Expected{ + tags: []models.Tag{}, + totalCount: 0, + err: NewErrNamespaceNotFound("tenant1", errors.New("error")), }, - expected: NewErrTagInvalid("invalid_tag", validator.ErrStructureInvalid), }, { - name: "fail when could not find the namespace", - tag: "device1", - tenant: "not_found_tenant", + description: "fails when tag list fails", + req: &requests.ListTags{ + TenantID: "tenant1", + Paginator: query.Paginator{Page: 1, PerPage: 10}, + }, requiredMocks: func() { - mock.On("NamespaceGet", ctx, "not_found_tenant").Return(nil, errors.New("error", "", 0)).Once() + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "tenant1"). + Return(&models.Namespace{}, nil). + Once() + queryOptionsMock. + On("InNamespace", "tenant1"). + Return(nil). + Once() + queryOptionsMock. + On("Match", &query.Filters{}). + Return(nil). + Once() + queryOptionsMock. + On("Sort", &query.Sorter{By: "created_at", Order: query.OrderDesc}). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 1, PerPage: 10}). + Return(nil). + Once() + storeMock. + On("TagList", ctx, mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return(nil, 0, errors.New("error")). + Once() + }, + expected: Expected{ + tags: []models.Tag{}, + totalCount: 0, + err: errors.New("error"), }, - expected: NewErrNamespaceNotFound("not_found_tenant", errors.New("error", "", 0)), }, { - name: "fail when tags are empty", - tag: "device1", - tenant: "tenant", + description: "succeeds listing tags", + req: &requests.ListTags{ + TenantID: "tenant1", + Paginator: query.Paginator{ + Page: 1, + PerPage: 10, + }, + }, requiredMocks: func() { - namespace := &models.Namespace{Name: "namespace", TenantID: "tenant"} + storeMock. 
+ On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "tenant1"). + Return(&models.Namespace{}, nil). + Once() + queryOptionsMock. + On("InNamespace", "tenant1"). + Return(nil). + Once() + queryOptionsMock. + On("Match", &query.Filters{}). + Return(nil). + Once() + queryOptionsMock. + On("Sort", &query.Sorter{By: "created_at", Order: query.OrderDesc}). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 1, PerPage: 10}). + Return(nil). + Once() + storeMock. + On("TagList", ctx, mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return([]models.Tag{{Name: "production", TenantID: "tenant1"}}, 1, nil). + Once() + }, + expected: Expected{ + tags: []models.Tag{{Name: "production", TenantID: "tenant1"}}, + totalCount: 1, + err: nil, + }, + }, + } - mock.On("NamespaceGet", ctx, "tenant").Return(namespace, nil).Once() - mock.On("TagsGet", ctx, "tenant").Return(nil, 0, errors.New("error", "", 0)).Once() + service := NewService(storeMock, privateKey, publicKey, nil, nil) + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + tc.requiredMocks() + + tags, count, err := service.ListTags(ctx, tc.req) + require.Equal(t, tc.expected, Expected{tags, count, err}) + }) + } + + storeMock.AssertExpectations(t) +} + +func TestService_UpdateTag(t *testing.T) { + storeMock := new(storemock.Store) + queryOptionsMock := new(storemock.QueryOptions) + storeMock.On("Options").Return(queryOptionsMock) + + ctx := context.TODO() + + type Expected struct { + conflicts []string + err error + } + + cases := []struct { + description string + req *requests.UpdateTag + requiredMocks func() + expected Expected + }{ + { + description: "fails when namespace not found", + req: &requests.UpdateTag{ + Name: "production", + NewName: "staging", + TenantID: "tenant1", + }, + requiredMocks: func() { + storeMock. 
+ On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "tenant1"). + Return(nil, errors.New("error")). + Once() + }, + expected: Expected{ + conflicts: []string{}, + err: NewErrNamespaceNotFound("tenant1", errors.New("error")), + }, + }, + { + description: "fails when tag not found", + req: &requests.UpdateTag{ + Name: "production", + NewName: "staging", + TenantID: "tenant1", + }, + requiredMocks: func() { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "tenant1"). + Return(&models.Namespace{}, nil). + Once() + queryOptionsMock. + On("InNamespace", "tenant1"). + Return(nil). + Once() + storeMock. + On("TagResolve", ctx, store.TagNameResolver, "production", mock.AnythingOfType("store.QueryOption")). + Return(nil, errors.New("error")). + Once() + }, + expected: Expected{ + conflicts: []string{}, + err: NewErrTagNotFound("production", errors.New("error")), + }, + }, + { + description: "fails when new name conflicts", + req: &requests.UpdateTag{ + Name: "production", + NewName: "staging", + TenantID: "tenant1", + }, + requiredMocks: func() { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "tenant1"). + Return(&models.Namespace{}, nil). + Once() + queryOptionsMock. + On("InNamespace", "tenant1"). + Return(nil). + Once() + storeMock. + On("TagResolve", ctx, store.TagNameResolver, "production", mock.AnythingOfType("store.QueryOption")). + Return(&models.Tag{ID: "tag_00000000-0000-4000-0000-000000000000", Name: "production"}, nil). + Once() + storeMock. + On("TagConflicts", ctx, "tenant1", &models.TagConflicts{Name: "staging"}). + Return([]string{"name"}, true, nil). 
+ Once() + }, + expected: Expected{ + conflicts: []string{"name"}, + err: NewErrTagDuplicated("staging", nil), + }, + }, + { + description: "fails when tag update fails", + req: &requests.UpdateTag{ + Name: "production", + NewName: "staging", + TenantID: "tenant1", + }, + requiredMocks: func() { + tag := &models.Tag{ID: "tag_00000000-0000-4000-0000-000000000000", Name: "production"} + updatedTag := &models.Tag{ID: "tag_00000000-0000-4000-0000-000000000000", Name: "staging"} + + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "tenant1"). + Return(&models.Namespace{}, nil). + Once() + queryOptionsMock. + On("InNamespace", "tenant1"). + Return(nil). + Once() + storeMock. + On("TagResolve", ctx, store.TagNameResolver, "production", mock.AnythingOfType("store.QueryOption")). + Return(tag, nil). + Once() + storeMock. + On("TagConflicts", ctx, "tenant1", &models.TagConflicts{Name: "staging"}). + Return([]string{}, false, nil). + Once() + storeMock. + On("TagUpdate", ctx, updatedTag). + Return(errors.New("error")). + Once() + }, + expected: Expected{ + conflicts: nil, + err: errors.New("error"), }, - expected: NewErrTagEmpty("tenant", errors.New("error", "", 0)), }, { - name: "fail when tag does not exist", - tag: "device3", - tenant: "tenant", + description: "succeeds updating tag", + req: &requests.UpdateTag{ + Name: "production", + NewName: "staging", + TenantID: "tenant1", + }, requiredMocks: func() { - namespace := &models.Namespace{Name: "namespace", TenantID: "tenant"} + tag := &models.Tag{ID: "tag_00000000-0000-4000-0000-000000000000", Name: "production"} + + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "tenant1"). + Return(&models.Namespace{}, nil). + Once() + queryOptionsMock. + On("InNamespace", "tenant1"). + Return(nil). + Once() + storeMock. + On("TagResolve", ctx, store.TagNameResolver, "production", mock.AnythingOfType("store.QueryOption")). + Return(tag, nil). + Once() + storeMock. 
+ On("TagConflicts", ctx, "tenant1", &models.TagConflicts{Name: "staging"}). + Return([]string{}, false, nil). + Once() + + expectedTag := *tag + expectedTag.Name = "staging" + + storeMock. + On("TagUpdate", ctx, &expectedTag). + Return(nil). + Once() + }, + expected: Expected{ + conflicts: []string{}, + err: nil, + }, + }, + } - device := &models.Device{ - UID: "uid", - Namespace: "namespace", - TenantID: "tenant", - Tags: []string{"device1", "device2"}, - } + service := NewService(storeMock, privateKey, publicKey, nil, nil) + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + tc.requiredMocks() + + conflicts, err := service.UpdateTag(ctx, tc.req) + require.Equal(t, tc.expected, Expected{conflicts, err}) + }) + } + + storeMock.AssertExpectations(t) +} + +func TestService_DeleteTag(t *testing.T) { + storeMock := new(storemock.Store) + queryOptionsMock := new(storemock.QueryOptions) + storeMock.On("Options").Return(queryOptionsMock) - mock.On("NamespaceGet", ctx, "tenant").Return(namespace, nil).Once() - mock.On("TagsGet", ctx, "tenant").Return(device.Tags, len(device.Tags), nil).Once() + ctx := context.TODO() + + cases := []struct { + description string + req *requests.DeleteTag + requiredMocks func() + expected error + }{ + { + description: "fails when namespace not found", + req: &requests.DeleteTag{ + Name: "production", + TenantID: "tenant1", + }, + requiredMocks: func() { + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "tenant1"). + Return(nil, errors.New("error")). 
+ Once() }, - expected: NewErrTagNotFound("device3", nil), + expected: NewErrNamespaceNotFound("tenant1", errors.New("error")), }, { - name: "fail when the store function to delete the tag fails", - tag: "device1", - tenant: "tenant", + description: "fails when tag not found", + req: &requests.DeleteTag{ + Name: "production", + TenantID: "tenant1", + }, requiredMocks: func() { - namespace := &models.Namespace{Name: "namespace", TenantID: "tenant"} - - device := &models.Device{ - UID: "uid", - Namespace: "namespace", - TenantID: "tenant", - Tags: []string{"device1", "device2"}, + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "tenant1"). + Return(&models.Namespace{}, nil). + Once() + queryOptionsMock. + On("InNamespace", "tenant1"). + Return(nil). + Once() + storeMock. + On("TagResolve", ctx, store.TagNameResolver, "production", mock.AnythingOfType("store.QueryOption")). + Return(nil, errors.New("error")). + Once() + }, + expected: NewErrTagNotFound("production", errors.New("error")), + }, + { + description: "fails when tag pull fails", + req: &requests.DeleteTag{ + Name: "production", + TenantID: "tenant1", + }, + requiredMocks: func() { + tag := &models.Tag{ID: "tag_00000000-0000-4000-0000-000000000000", Name: "production"} + + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "tenant1"). + Return(&models.Namespace{}, nil). + Once() + queryOptionsMock. + On("InNamespace", "tenant1"). + Return(nil). + Once() + storeMock. + On("TagResolve", ctx, store.TagNameResolver, "production", mock.AnythingOfType("store.QueryOption")). + Return(tag, nil). + Once() + + for _, target := range store.TagTargets() { + storeMock. + On("TagPullFromTarget", ctx, "tag_00000000-0000-4000-0000-000000000000", target). + Return(errors.New("error")). 
+ Once() + + break } - - mock.On("NamespaceGet", ctx, "tenant").Return(namespace, nil).Once() - mock.On("TagsGet", ctx, "tenant").Return(device.Tags, len(device.Tags), nil).Once() - mock.On("TagsDelete", ctx, "tenant", "device1").Return(int64(0), errors.New("error", "", 0)).Once() }, - expected: errors.New("error", "", 0), + expected: errors.New("error"), }, { - name: "success to delete tags", - tag: "device1", - tenant: "tenant", + description: "fails when tag delete fails", + req: &requests.DeleteTag{ + Name: "production", + TenantID: "tenant1", + }, requiredMocks: func() { - namespace := &models.Namespace{Name: "namespace", TenantID: "tenant"} + tag := &models.Tag{ID: "tag_00000000-0000-4000-0000-000000000000", Name: "production"} + + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "tenant1"). + Return(&models.Namespace{}, nil). + Once() + queryOptionsMock. + On("InNamespace", "tenant1"). + Return(nil). + Once() + storeMock. + On("TagResolve", ctx, store.TagNameResolver, "production", mock.AnythingOfType("store.QueryOption")). + Return(tag, nil). + Once() + + for _, target := range store.TagTargets() { + storeMock. + On("TagPullFromTarget", ctx, "tag_00000000-0000-4000-0000-000000000000", target). + Return(nil). + Once() + } - device := &models.Device{ - UID: "uid", - Namespace: "namespace", - TenantID: "tenant", - Tags: []string{"device1", "device2"}, + storeMock. + On("TagDelete", ctx, tag). + Return(errors.New("error")). + Once() + }, + expected: errors.New("error"), + }, + { + description: "succeeds", + req: &requests.DeleteTag{ + Name: "production", + TenantID: "tenant1", + }, + requiredMocks: func() { + tag := &models.Tag{ID: "tag_00000000-0000-4000-0000-000000000000", Name: "production"} + + storeMock. + On("NamespaceResolve", ctx, store.NamespaceTenantIDResolver, "tenant1"). + Return(&models.Namespace{}, nil). + Once() + queryOptionsMock. + On("InNamespace", "tenant1"). + Return(nil). + Once() + storeMock. 
+ On("TagResolve", ctx, store.TagNameResolver, "production", mock.AnythingOfType("store.QueryOption")). + Return(tag, nil). + Once() + + for _, target := range store.TagTargets() { + storeMock. + On("TagPullFromTarget", ctx, "tag_00000000-0000-4000-0000-000000000000", target). + Return(nil). + Once() } - mock.On("NamespaceGet", ctx, "tenant").Return(namespace, nil).Once() - mock.On("TagsGet", ctx, "tenant").Return(device.Tags, len(device.Tags), nil).Once() - mock.On("TagsDelete", ctx, "tenant", "device1").Return(int64(1), nil).Once() + storeMock. + On("TagDelete", ctx, tag). + Return(nil). + Once() }, expected: nil, }, } + service := NewService(storeMock, privateKey, publicKey, nil, nil) + + storeMock. + On("WithTransaction", ctx, mock.AnythingOfType("store.TransactionCb")). + Return(func(ctx context.Context, cb store.TransactionCb) error { return cb(ctx) }). + Times(len(cases)) + for _, tc := range cases { - t.Run(tc.name, func(t *testing.T) { + t.Run(tc.description, func(t *testing.T) { tc.requiredMocks() - locator := &mocksGeoIp.Locator{} - service := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, locator) - - err := service.DeleteTag(ctx, tc.tenant, tc.tag) - assert.Equal(t, tc.expected, err) + err := service.DeleteTag(ctx, tc.req) + require.Equal(t, tc.expected, err) }) } - mock.AssertExpectations(t) + storeMock.AssertExpectations(t) } diff --git a/api/services/task.go b/api/services/task.go new file mode 100644 index 00000000000..cbec8e66082 --- /dev/null +++ b/api/services/task.go @@ -0,0 +1,166 @@ +package services + +import ( + "bufio" + "bytes" + "context" + "slices" + "time" + + "github.com/shellhub-io/shellhub/api/store" + "github.com/shellhub-io/shellhub/pkg/api/query" + "github.com/shellhub-io/shellhub/pkg/clock" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/shellhub-io/shellhub/pkg/worker" + log "github.com/sirupsen/logrus" +) + +const ( + TaskDevicesHeartbeat = 
worker.TaskPattern("api:heartbeat") + CronDeviceCleanup = worker.CronSpec("0 2 * * *") +) + +// DevicesHeartbeat creates a task handler for processing device heartbeat signals. The payload format is a +// newline-separated list of device UIDs. +func (s *service) DevicesHeartbeat() worker.TaskHandler { + return func(ctx context.Context, payload []byte) error { + log.WithField("task", TaskDevicesHeartbeat.String()). + Info("executing heartbeat task") + + scanner := bufio.NewScanner(bytes.NewReader(payload)) + scanner.Split(bufio.ScanLines) + + uids := make([]string, 0) + for scanner.Scan() { + uid := scanner.Text() + if uid == "" { + continue + } + + uids = append(uids, uid) + } + + slices.Sort(uids) + uids = slices.Compact(uids) + + mCount, err := s.store.DeviceHeartbeat(ctx, uids, clock.Now()) + if err != nil { + log.WithField("task", TaskDevicesHeartbeat.String()). + WithError(err). + Error("failed to complete the heartbeat task") + + return err + } + + log.WithField("task", TaskDevicesHeartbeat.String()). + WithField("modified_count", mCount). 
+ Info("finishing heartbeat task") + + return nil + } +} + +func (s *service) DeviceCleanup() worker.CronHandler { + return func(ctx context.Context) error { + return s.store.WithTransaction(ctx, s.deviceCleanup()) + } +} + +func (s *service) deviceCleanup() store.TransactionCb { + return func(ctx context.Context) error { + log.Info("Starting device cleanup for removed devices") + + filter := &query.Filters{ + Data: []query.Filter{ + { + Type: query.FilterTypeProperty, + Params: &query.FilterProperty{Name: "removed_at", Operator: "lt", Value: time.Now().AddDate(0, 0, -30)}, + }, + }, + } + + sorter := &query.Sorter{ + By: "removed_at", + Order: query.OrderAsc, + } + + _, totalCount, err := s.store.DeviceList(ctx, store.DeviceAcceptableAsFalse, s.store.Options().Match(filter)) + if err != nil { + log.WithError(err).Error("Failed to get total count of removed devices") + + return err + } + + if totalCount == 0 { + log.Info("No removed devices found, cleanup completed") + + return nil + } + + log.WithField("total_devices", totalCount).Info("Found removed devices to cleanup") + + const pageSize = 1000 + totalDeleted := 0 + + deletedPerTenant := make(map[string]int64) + totalPages := (totalCount + pageSize - 1) / pageSize + + for page := range totalPages { + opts := []store.QueryOption{ + s.store.Options().Match(filter), + s.store.Options().Sort(sorter), + s.store.Options().Paginate(&query.Paginator{Page: page + 1, PerPage: pageSize}), + } + + devices, _, err := s.store.DeviceList(ctx, store.DeviceAcceptableAsFalse, opts...) + if err != nil { + log.WithFields(log.Fields{"page": page, "error": err}).Error("Failed to list removed devices for page") + + return err + } + + if len(devices) == 0 { + continue + } + + log.WithFields(log.Fields{"page": page + 1, "total_pages": totalPages, "devices": len(devices)}). 
+ Info("Processing page of removed devices") + + uids := make([]string, len(devices)) + for i, device := range devices { + uids[i] = device.UID + totalDeleted++ + deletedPerTenant[device.TenantID]++ + } + + if _, err := s.store.DeviceDeleteMany(ctx, uids); err != nil { + log.WithField("page", page+1). + WithError(err). + Info("Failed to delete devices") + + return err + } + + log.WithFields(log.Fields{"page": page + 1, "total_pages": totalPages, "devices": len(devices), "total_deleted": totalDeleted}). + Info("Processing page of removed devices") + + if page < totalPages-1 { + time.Sleep(100 * time.Millisecond) + } + } + + for tenantID, deletedCount := range deletedPerTenant { + if err := s.store.NamespaceIncrementDeviceCount(ctx, tenantID, models.DeviceStatusRemoved, -deletedCount); err != nil { + log.WithFields(log.Fields{"tenant_id": tenantID, "deleted_count": deletedCount, "error": err}). + Error("Failed to decrement removed device count for namespace") + + return err + } + } + + log.WithFields(log.Fields{"total_found": totalCount, "total_deleted": totalDeleted}). 
+ Info("Device cleanup completed successfully") + + return nil + } +} diff --git a/api/services/task_test.go b/api/services/task_test.go new file mode 100644 index 00000000000..d9f8768b4c5 --- /dev/null +++ b/api/services/task_test.go @@ -0,0 +1,460 @@ +package services + +import ( + "context" + "errors" + "testing" + "time" + + "github.com/shellhub-io/shellhub/api/store" + storemock "github.com/shellhub-io/shellhub/api/store/mocks" + "github.com/shellhub-io/shellhub/pkg/api/query" + "github.com/shellhub-io/shellhub/pkg/cache" + "github.com/shellhub-io/shellhub/pkg/clock" + clockmock "github.com/shellhub-io/shellhub/pkg/clock/mocks" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" +) + +func TestService_DevicesHeartbeat(t *testing.T) { + storeMock := new(storemock.Store) + clockMock := new(clockmock.Clock) + + clock.DefaultBackend = clockMock + + clockMock.On("Now").Return(now) + + cases := []struct { + description string + payload []byte + requiredMocks func(context.Context) + expected error + }{ + { + description: "fails when cannot set the status", + payload: []byte("0000000000000000000000000000000000000000000000000000000000000000\n0000000000000000000000000000000000000000000000000000000000000001"), + requiredMocks: func(ctx context.Context) { + storeMock. + On( + "DeviceHeartbeat", + ctx, + []string{"0000000000000000000000000000000000000000000000000000000000000000", "0000000000000000000000000000000000000000000000000000000000000001"}, + now, + ). + Return(int64(0), errors.New("error")). + Once() + }, + expected: errors.New("error"), + }, + { + description: "succeeds with duplicated IDs", + payload: []byte("0000000000000000000000000000000000000000000000000000000000000000\n0000000000000000000000000000000000000000000000000000000000000001\n0000000000000000000000000000000000000000000000000000000000000000"), + requiredMocks: func(ctx context.Context) { + storeMock. 
+ On( + "DeviceHeartbeat", + ctx, + []string{"0000000000000000000000000000000000000000000000000000000000000000", "0000000000000000000000000000000000000000000000000000000000000001"}, + now, + ). + Return(int64(2), nil). + Once() + }, + expected: nil, + }, + { + description: "succeeds", + payload: []byte("0000000000000000000000000000000000000000000000000000000000000000\n0000000000000000000000000000000000000000000000000000000000000001"), + requiredMocks: func(ctx context.Context) { + storeMock. + On( + "DeviceHeartbeat", + ctx, + []string{"0000000000000000000000000000000000000000000000000000000000000000", "0000000000000000000000000000000000000000000000000000000000000001"}, + now, + ). + Return(int64(2), nil). + Once() + }, + expected: nil, + }, + } + + s := NewService(storeMock, privateKey, publicKey, cache.NewNullCache(), clientMock) + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + ctx := context.Background() + tc.requiredMocks(ctx) + require.Equal(tt, tc.expected, s.DevicesHeartbeat()(ctx, tc.payload)) + }) + } +} + +func TestService_DeviceCleanup(t *testing.T) { + ctx := context.Background() + + matchFilter := func() func(*query.Filters) bool { + return func(filters *query.Filters) bool { + if len(filters.Data) != 1 { + return false + } + + matchTime := func() bool { + filter := filters.Data[0] + if filter.Type != query.FilterTypeProperty { + return false + } + + params, ok := filter.Params.(*query.FilterProperty) + if !ok { + return false + } + + if params.Name != "removed_at" || params.Operator != "lt" { + return false + } + + timeValue, isTime := params.Value.(time.Time) + if !isTime { + return false + } + + expectedTime := time.Now().AddDate(0, 0, -30) + timeDiff := timeValue.Sub(expectedTime) + if timeDiff < 0 { + timeDiff = -timeDiff + } + + return timeDiff <= time.Second // allow 1 seconds tolerance + } + + return matchTime() + } + } + + storeMock := new(storemock.Store) + clockMock := new(clockmock.Clock) + + 
clock.DefaultBackend = clockMock + + now := time.Now() + clockMock.On("Now").Return(now) + + queryOptionsMock := new(storemock.QueryOptions) + storeMock.On("Options").Return(queryOptionsMock) + + thirtyDaysAgo := time.Now().AddDate(0, 0, -30) + sorter := query.Sorter{By: "removed_at", Order: query.OrderAsc} + + cases := []struct { + description string + requiredMocks func() + expected error + }{ + { + description: "fails when cannot get total count of removed devices", + requiredMocks: func() { + queryOptionsMock. + On("Match", mock.MatchedBy(matchFilter())). + Return(nil). + Once() + storeMock. + On("DeviceList", ctx, store.DeviceAcceptableAsFalse, mock.AnythingOfType("store.QueryOption")). + Return([]models.Device{}, 0, errors.New("database error")). + Once() + }, + expected: errors.New("database error"), + }, + { + description: "succeeds with no removed devices to cleanup", + requiredMocks: func() { + queryOptionsMock. + On("Match", mock.MatchedBy(matchFilter())). + Return(nil). + Once() + storeMock. + On("DeviceList", ctx, store.DeviceAcceptableAsFalse, mock.AnythingOfType("store.QueryOption")). + Return([]models.Device{}, 0, nil). + Once() + }, + expected: nil, + }, + { + description: "fails when cannot list devices for a page", + requiredMocks: func() { + queryOptionsMock. + On("Match", mock.MatchedBy(matchFilter())). + Return(nil). + Once() + storeMock. + On("DeviceList", ctx, store.DeviceAcceptableAsFalse, mock.AnythingOfType("store.QueryOption")). + Return([]models.Device{}, 1000, nil). + Once() + queryOptionsMock. + On("Match", mock.MatchedBy(matchFilter())). + Return(nil). + Once() + queryOptionsMock. + On("Sort", &sorter). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 1, PerPage: 1000}). + Return(nil). + Once() + storeMock. + On("DeviceList", ctx, store.DeviceAcceptableAsFalse, mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). 
+ Return([]models.Device{}, 0, errors.New("page error")). + Once() + }, + expected: errors.New("page error"), + }, + { + description: "fails when cannot delete some devices", + requiredMocks: func() { + queryOptionsMock. + On("Match", mock.MatchedBy(matchFilter())). + Return(nil). + Once() + storeMock. + On("DeviceList", ctx, store.DeviceAcceptableAsFalse, mock.AnythingOfType("store.QueryOption")). + Return([]models.Device{}, 2, nil). + Once() + queryOptionsMock. + On("Match", mock.MatchedBy(matchFilter())). + Return(nil). + Once() + queryOptionsMock. + On("Sort", &sorter). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 1, PerPage: 1000}). + Return(nil). + Once() + storeMock. + On("DeviceList", ctx, store.DeviceAcceptableAsFalse, mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return( + []models.Device{ + {UID: "device-1", TenantID: "tenant-1", RemovedAt: &thirtyDaysAgo}, + {UID: "device-2", TenantID: "tenant-1", RemovedAt: &thirtyDaysAgo}, + }, + 2, + nil, + ). + Once() + storeMock. + On("DeviceDeleteMany", ctx, []string{"device-1", "device-2"}). + Return(int64(0), errors.New("delete error")). + Once() + }, + expected: errors.New("delete error"), + }, + { + description: "fails when cannot update counters", + requiredMocks: func() { + queryOptionsMock. + On("Match", mock.MatchedBy(matchFilter())). + Return(nil). + Once() + storeMock. + On("DeviceList", ctx, store.DeviceAcceptableAsFalse, mock.AnythingOfType("store.QueryOption")). + Return([]models.Device{}, 3, nil). + Once() + queryOptionsMock. + On("Match", mock.MatchedBy(matchFilter())). + Return(nil). + Once() + queryOptionsMock. + On("Sort", &sorter). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 1, PerPage: 1000}). + Return(nil). + Once() + storeMock. 
+ On("DeviceList", ctx, store.DeviceAcceptableAsFalse, mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return( + []models.Device{ + {UID: "device-1", TenantID: "tenant-1", RemovedAt: &thirtyDaysAgo}, + {UID: "device-2", TenantID: "tenant-1", RemovedAt: &thirtyDaysAgo}, + {UID: "device-3", TenantID: "tenant-2", RemovedAt: &thirtyDaysAgo}, + }, + 3, + nil, + ). + Once() + storeMock. + On("DeviceDeleteMany", ctx, []string{"device-1", "device-2", "device-3"}). + Return(int64(3), nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "tenant-1", models.DeviceStatusRemoved, int64(-2)). + Return(nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "tenant-2", models.DeviceStatusRemoved, int64(-1)). + Return(errors.New("update error")). + Once() + }, + expected: errors.New("update error"), + }, + { + description: "succeeds deleting all old removed devices and updates counters", + requiredMocks: func() { + queryOptionsMock. + On("Match", mock.MatchedBy(matchFilter())). + Return(nil). + Once() + storeMock. + On("DeviceList", ctx, store.DeviceAcceptableAsFalse, mock.AnythingOfType("store.QueryOption")). + Return([]models.Device{}, 3, nil). + Once() + queryOptionsMock. + On("Match", mock.MatchedBy(matchFilter())). + Return(nil). + Once() + queryOptionsMock. + On("Sort", &sorter). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 1, PerPage: 1000}). + Return(nil). + Once() + storeMock. + On("DeviceList", ctx, store.DeviceAcceptableAsFalse, mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return( + []models.Device{ + {UID: "device-1", TenantID: "tenant-1", RemovedAt: &thirtyDaysAgo}, + {UID: "device-2", TenantID: "tenant-1", RemovedAt: &thirtyDaysAgo}, + {UID: "device-3", TenantID: "tenant-2", RemovedAt: &thirtyDaysAgo}, + }, + 3, + nil, + ). 
+ Once() + storeMock. + On("DeviceDeleteMany", ctx, []string{"device-1", "device-2", "device-3"}). + Return(int64(3), nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "tenant-1", models.DeviceStatusRemoved, int64(-2)). + Return(nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "tenant-2", models.DeviceStatusRemoved, int64(-1)). + Return(nil). + Once() + }, + expected: nil, + }, + { + description: "succeeds with multiple pages and updates counters correctly", + requiredMocks: func() { + queryOptionsMock. + On("Match", mock.MatchedBy(matchFilter())). + Return(nil). + Once() + storeMock. + On("DeviceList", ctx, store.DeviceAcceptableAsFalse, mock.AnythingOfType("store.QueryOption")). + Return([]models.Device{}, 2001, nil). + Once() + queryOptionsMock. + On("Match", mock.MatchedBy(matchFilter())). + Return(nil). + Once() + queryOptionsMock. + On("Sort", &sorter). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 1, PerPage: 1000}). + Return(nil). + Once() + storeMock. + On("DeviceList", ctx, store.DeviceAcceptableAsFalse, mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return( + []models.Device{ + {UID: "device-1", TenantID: "tenant-1", RemovedAt: &thirtyDaysAgo}, + }, + 2001, + nil, + ). + Once() + storeMock. + On("DeviceDeleteMany", ctx, []string{"device-1"}). + Return(int64(1), nil). + Once() + queryOptionsMock. + On("Match", mock.MatchedBy(matchFilter())). + Return(nil). + Once() + queryOptionsMock. + On("Sort", &sorter). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 2, PerPage: 1000}). + Return(nil). + Once() + storeMock. + On("DeviceList", ctx, store.DeviceAcceptableAsFalse, mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). 
+ Return( + []models.Device{ + {UID: "device-2", TenantID: "tenant-2", RemovedAt: &thirtyDaysAgo}, + }, + 2001, + nil, + ). + Once() + storeMock. + On("DeviceDeleteMany", ctx, []string{"device-2"}). + Return(int64(1), nil). + Once() + queryOptionsMock. + On("Match", mock.MatchedBy(matchFilter())). + Return(nil). + Once() + queryOptionsMock. + On("Sort", &sorter). + Return(nil). + Once() + queryOptionsMock. + On("Paginate", &query.Paginator{Page: 3, PerPage: 1000}). + Return(nil). + Once() + storeMock. + On("DeviceList", ctx, store.DeviceAcceptableAsFalse, mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption"), mock.AnythingOfType("store.QueryOption")). + Return([]models.Device{}, 2001, nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "tenant-1", models.DeviceStatusRemoved, int64(-1)). + Return(nil). + Once() + storeMock. + On("NamespaceIncrementDeviceCount", ctx, "tenant-2", models.DeviceStatusRemoved, int64(-1)). + Return(nil). + Once() + }, + expected: nil, + }, + } + + s := NewService(storeMock, privateKey, publicKey, cache.NewNullCache(), clientMock) + + storeMock. + On("WithTransaction", ctx, mock.AnythingOfType("store.TransactionCb")). + Return(func(ctx context.Context, cb store.TransactionCb) error { return cb(ctx) }). 
+ Times(len(cases)) + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tc.requiredMocks() + require.Equal(tt, tc.expected, s.DeviceCleanup()(ctx)) + }) + } +} diff --git a/api/services/user.go b/api/services/user.go index 1e2edd9f2ca..d6ba22c7952 100644 --- a/api/services/user.go +++ b/api/services/user.go @@ -2,76 +2,123 @@ package services import ( "context" + "strings" + "github.com/shellhub-io/shellhub/api/store" + "github.com/shellhub-io/shellhub/pkg/api/requests" "github.com/shellhub-io/shellhub/pkg/models" ) type UserService interface { - UpdateDataUser(ctx context.Context, id string, userData models.UserData) ([]string, error) + // UpdateUser updates the user's data, such as email and username. Since some attributes must be unique per user, + // it returns a list of duplicated unique values and an error if any. + // + // FIX: + // When `req.RecoveryEmail` is equal to `user.Email` or `req.Email`, return a bad request status + // with an error object like `{"error": "recovery_email must be different from email"}` instead of setting + // conflicts to `["email", "recovery_email"]`. + UpdateUser(ctx context.Context, req *requests.UpdateUser) (conflicts []string, err error) + UpdatePasswordUser(ctx context.Context, id string, currentPassword, newPassword string) error } -// UpdateDataUser update user data. -// -// It receives a context, used to "control" the request flow, the user's ID, and a requests.UserDataUpdate struct with -// fields to update in the models.User. -// -// It returns a slice of strings with the fields that contains data duplicated in the database, and an error. 
-func (s *service) UpdateDataUser(ctx context.Context, id string, userData models.UserData) ([]string, error) { - if ok, err := s.validator.Struct(userData); !ok || err != nil { - return nil, NewErrUserInvalid(nil, err) +func (s *service) UpdateUser(ctx context.Context, req *requests.UpdateUser) ([]string, error) { + user, err := s.store.UserResolve(ctx, store.UserIDResolver, req.UserID) + if err != nil { + return []string{}, NewErrUserNotFound(req.UserID, nil) } - if _, _, err := s.store.UserGetByID(ctx, id, false); err != nil { - return nil, NewErrUserNotFound(id, nil) + if req.RecoveryEmail == user.Email || req.RecoveryEmail == req.Email { + return []string{"email", "recovery_email"}, NewErrBadRequest(nil) } - conflictFields := make([]string, 0) - existentUser, _ := s.store.UserGetByUsername(ctx, userData.Username) - if existentUser != nil && existentUser.ID != id { - conflictFields = append(conflictFields, "username") + conflictsTarget := &models.UserConflicts{Email: req.Email, Username: req.Username} + conflictsTarget.Distinct(user) + if conflicts, has, _ := s.store.UserConflicts(ctx, conflictsTarget); has { + return conflicts, NewErrUserDuplicated(conflicts, nil) } - existentUser, _ = s.store.UserGetByEmail(ctx, userData.Email) - if existentUser != nil && existentUser.ID != id { - conflictFields = append(conflictFields, "email") + updatedUser, err := applyUserChanges(user, req) + if err != nil { + return []string{}, err } - if len(conflictFields) > 0 { - return conflictFields, NewErrUserDuplicated(conflictFields, nil) + if err := s.store.UserUpdate(ctx, updatedUser); err != nil { + return []string{}, NewErrUserUpdate(user, err) } - return nil, s.store.UserUpdateData(ctx, id, models.User{ - UserData: models.UserData{ - Name: userData.Name, - Username: userData.Username, - Email: userData.Email, - }, - }) + return []string{}, nil } +// UpdatePasswordUser updates a user's password. +// +// Deprecated, use [Service.UpdateUser] instead. 
func (s *service) UpdatePasswordUser(ctx context.Context, id, currentPassword, newPassword string) error { - user, _, err := s.store.UserGetByID(ctx, id, false) + user, err := s.store.UserResolve(ctx, store.UserIDResolver, id) if user == nil { return NewErrUserNotFound(id, err) } - current := models.NewUserPassword(currentPassword) - - if !user.UserPassword.Compare(current) { + if !user.Password.Compare(currentPassword) { return NewErrUserPasswordNotMatch(nil) } - neo := models.NewUserPassword(newPassword) - - if ok, err := s.validator.Struct(neo); !ok || err != nil { + neo, err := models.HashUserPassword(newPassword) + if err != nil { return NewErrUserPasswordInvalid(err) } - // NOTE: when the password is equal to previous one, we return success without action on the database. - if user.UserPassword.Compare(neo) { - return nil + user.Password = neo + + if err := s.store.UserUpdate(ctx, user); err != nil { + return NewErrUserUpdate(user, err) + } + + return nil +} + +// applyUserChanges creates a new User instance by applying the requested changes to the current user. +// It returns a copy of the current user with updated fields, leaving the original unchanged. +// +// Only non-empty fields from changes are applied, and string comparisons are case-insensitive. +// String fields (Username, Email, RecoveryEmail) are normalized to lowercase. +// +// For password changes, the current password must be provided and match the existing password. +// The new password is hashed before being stored. 
+func applyUserChanges(currentUser *models.User, req *requests.UpdateUser) (*models.User, error) { + isDifferentAndNotEmpty := func(currentValue, newValue string) bool { + return newValue != "" && !strings.EqualFold(currentValue, newValue) + } + + newUser := *currentUser + + if isDifferentAndNotEmpty(currentUser.Name, req.Name) { + newUser.Name = req.Name + } + + if isDifferentAndNotEmpty(currentUser.Username, req.Username) { + newUser.Username = strings.ToLower(req.Username) + } + + if isDifferentAndNotEmpty(currentUser.Email, req.Email) { + newUser.Email = strings.ToLower(req.Email) + } + + if isDifferentAndNotEmpty(currentUser.RecoveryEmail, req.RecoveryEmail) { + newUser.RecoveryEmail = strings.ToLower(req.RecoveryEmail) + } + + if req.Password != "" { + if !currentUser.Password.Compare(req.CurrentPassword) { + return nil, NewErrUserPasswordNotMatch(nil) + } + + hashedPassword, err := models.HashUserPassword(req.Password) + if err != nil { + return nil, err + } + newUser.Password = hashedPassword } - return s.store.UserUpdatePassword(ctx, neo.HashedPassword, id) + return &newUser, nil } diff --git a/api/services/user_test.go b/api/services/user_test.go index a6a30b4063c..6c4e3588bcc 100644 --- a/api/services/user_test.go +++ b/api/services/user_test.go @@ -6,235 +6,308 @@ import ( "github.com/shellhub-io/shellhub/api/store" "github.com/shellhub-io/shellhub/api/store/mocks" + "github.com/shellhub-io/shellhub/pkg/api/requests" storecache "github.com/shellhub-io/shellhub/pkg/cache" "github.com/shellhub-io/shellhub/pkg/errors" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/shellhub-io/shellhub/pkg/validator" "github.com/stretchr/testify/assert" ) -func TestUpdateDataUser(t *testing.T) { - mock := new(mocks.Store) - - ctx := context.Background() - +func TestUpdateUser(t *testing.T) { type Expected struct { - fields []string - err error + conflicts []string + err error } + storeMock := new(mocks.Store) + cases := []struct { description string - id string 
- data models.UserData - requiredMocks func() + req *requests.UpdateUser + requiredMocks func(context.Context) expected Expected }{ { - description: "Fail when user data is invalid", - id: "1", - requiredMocks: func() {}, + description: "Fail when user is not found", + req: &requests.UpdateUser{ + UserID: "000000000000000000000000", + Name: "John Doe", + Username: "john_doe", + Email: "john.doe@test.com", + RecoveryEmail: "recovery@test.com", + }, + requiredMocks: func(ctx context.Context) { + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(nil, NewErrUserNotFound("000000000000000000000000", nil)). + Once() + }, expected: Expected{ - fields: nil, - err: NewErrUserInvalid(nil, validator.ErrStructureInvalid), + conflicts: []string{}, + err: NewErrUserNotFound("000000000000000000000000", nil), }, }, { - description: "Fail when user is not found", - id: "1", - data: models.UserData{ - Name: "test", - Username: "test", - Email: "test@shellhub.io", + description: "Fail when recovery email is same as req's email", + req: &requests.UpdateUser{ + UserID: "000000000000000000000000", + Name: "John Doe", + Username: "john_doe", + Email: "john.doe@test.com", + RecoveryEmail: "john.doe@test.com", }, - requiredMocks: func() { - mock.On("UserGetByID", ctx, "1", false).Return(nil, 0, NewErrUserNotFound("1", nil)).Once() + requiredMocks: func(ctx context.Context) { + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return( + &models.User{ + ID: "000000000000000000000000", + UserData: models.UserData{ + Name: "James Smith", + Username: "james_smith", + Email: "james.smith@test.com", + RecoveryEmail: "recover@test.com", + }, + }, + nil, + ). 
+ Once() }, expected: Expected{ - fields: nil, - err: NewErrUserNotFound("1", nil), + conflicts: []string{"email", "recovery_email"}, + err: NewErrBadRequest(nil), }, }, { - description: "Fail when username already exists", - id: "1", - data: models.UserData{ - Name: "test", - Username: "new", - Email: "test@test.com", + description: "Fail when recovery email is same as user's email", + req: &requests.UpdateUser{ + UserID: "000000000000000000000000", + Name: "John Doe", + Username: "john_doe", + Email: "john.doe@test.com", + RecoveryEmail: "james.smith@test.com", }, - requiredMocks: func() { - user := &models.User{ - ID: "1", - UserData: models.UserData{ - Name: "test", - Username: "test", - Email: "test@test.com", - }, - } - exist := &models.User{ - ID: "2", - UserData: models.UserData{ - Username: "new", - }, - } - - mock.On("UserGetByID", ctx, "1", false).Return(user, 1, nil).Once() - mock.On("UserGetByUsername", ctx, "new").Return(exist, nil).Once() - mock.On("UserGetByEmail", ctx, "test@test.com").Return(nil, nil).Once() + requiredMocks: func(ctx context.Context) { + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return( + &models.User{ + ID: "000000000000000000000000", + UserData: models.UserData{ + Name: "James Smith", + Username: "james_smith", + Email: "james.smith@test.com", + RecoveryEmail: "recover@test.com", + }, + }, + nil, + ). 
+ Once() }, expected: Expected{ - fields: []string{"username"}, - err: NewErrUserDuplicated([]string{"username"}, nil), + conflicts: []string{"email", "recovery_email"}, + err: NewErrBadRequest(nil), }, }, { - description: "Fail when email already exists", - id: "1", - data: models.UserData{ - Name: "test", - Username: "test", - Email: "new@test.com", + description: "Fail when conflict fields exists", + req: &requests.UpdateUser{ + UserID: "000000000000000000000000", + Name: "John Doe", + Username: "john_doe", + Email: "john.doe@test.com", + RecoveryEmail: "recovery@test.com", }, - requiredMocks: func() { - user := &models.User{ - ID: "1", - UserData: models.UserData{ - Email: "test@test.com", - }, - } - exist := &models.User{ - ID: "2", - UserData: models.UserData{ - Email: "new@test.com", - }, - } - - mock.On("UserGetByID", ctx, "1", false).Return(user, 1, nil).Once() - mock.On("UserGetByUsername", ctx, "test").Return(nil, nil).Once() - mock.On("UserGetByEmail", ctx, "new@test.com").Return(exist, nil).Once() + requiredMocks: func(ctx context.Context) { + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return( + &models.User{ + ID: "000000000000000000000000", + UserData: models.UserData{ + Name: "James Smith", + Username: "james_smith", + Email: "james.smith@test.com", + RecoveryEmail: "recover@test.com", + }, + }, + nil, + ). + Once() + storeMock. + On("UserConflicts", ctx, &models.UserConflicts{Username: "john_doe", Email: "john.doe@test.com"}). + Return([]string{"email"}, true, nil). 
+ Once() }, expected: Expected{ - fields: []string{"email"}, - err: NewErrUserDuplicated([]string{"email"}, nil), + conflicts: []string{"email"}, + err: NewErrUserDuplicated([]string{"email"}, nil), }, }, { - description: "Fail when username and email already exists", - id: "1", - data: models.UserData{ - Name: "test", - Username: "new", - Email: "new@test.com", + description: "fails when the current password doesn't match with user's password", + req: &requests.UpdateUser{ + UserID: "000000000000000000000000", + Name: "John Doe", + Username: "john_doe", + Email: "john.doe@test.com", + RecoveryEmail: "recovery@test.com", + Password: "new-secret", + CurrentPassword: "secret", }, - requiredMocks: func() { - user := &models.User{ - ID: "1", - UserData: models.UserData{ - Username: "test", - Email: "test@test.com", - }, - } - exist := &models.User{ - ID: "2", - UserData: models.UserData{ - Username: "new", - Email: "new@test.com", - }, - } - - mock.On("UserGetByID", ctx, "1", false).Return(user, 1, nil).Once() - mock.On("UserGetByUsername", ctx, "new").Return(exist, nil).Once() - mock.On("UserGetByEmail", ctx, "new@test.com").Return(exist, nil).Once() + requiredMocks: func(ctx context.Context) { + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return( + &models.User{ + ID: "000000000000000000000000", + UserData: models.UserData{ + Name: "James Smith", + Username: "james_smith", + Email: "james.smith@shellhub.io", + RecoveryEmail: "recover@test.com", + }, + Password: models.UserPassword{ + Hash: "$2a$10$V/6N1wsjheBVvWosVVVV2uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi", + }, + }, + nil, + ). + Once() + storeMock. + On("UserConflicts", ctx, &models.UserConflicts{Username: "john_doe", Email: "john.doe@test.com"}). + Return([]string{}, false, nil). + Once() + hashMock. + On("CompareWith", "secret", "$2a$10$V/6N1wsjheBVvWosVVVV2uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi"). + Return(false). 
+ Once() }, expected: Expected{ - fields: []string{"username", "email"}, - err: NewErrUserDuplicated([]string{"username", "email"}, nil), + conflicts: []string{}, + err: NewErrUserPasswordNotMatch(nil), }, }, { description: "Fail when could not update user", - id: "1", - data: models.UserData{ - Name: "test", - Username: "new", - Email: "new@test.com", + req: &requests.UpdateUser{ + UserID: "000000000000000000000000", + Name: "John Doe", + Username: "john_doe", + Email: "john.doe@test.com", + RecoveryEmail: "recovery@test.com", }, - requiredMocks: func() { + requiredMocks: func(ctx context.Context) { user := &models.User{ - ID: "1", + ID: "000000000000000000000000", UserData: models.UserData{ - Username: "test", - Email: "test@test.com", + Name: "James Smith", + Username: "james_smith", + Email: "james.smith@shellhub.io", + RecoveryEmail: "recover@test.com", }, } - - data := models.User{ + updatedUser := &models.User{ + ID: "000000000000000000000000", UserData: models.UserData{ - Name: "test", - Username: "new", - Email: "new@test.com", + Name: "John Doe", + Username: "john_doe", + Email: "john.doe@test.com", + RecoveryEmail: "recovery@test.com", }, } - mock.On("UserGetByID", ctx, "1", false).Return(user, 1, nil).Once() - mock.On("UserGetByUsername", ctx, "new").Return(nil, nil).Once() - mock.On("UserGetByEmail", ctx, "new@test.com").Return(nil, nil).Once() - mock.On("UserUpdateData", ctx, "1", data).Return(errors.New("error", "", 0)).Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(user, nil). + Once() + storeMock. + On("UserConflicts", ctx, &models.UserConflicts{Username: "john_doe", Email: "john.doe@test.com"}). + Return([]string{}, false, nil). + Once() + storeMock. + On("UserUpdate", ctx, updatedUser). + Return(errors.New("error", "", 0)). 
+ Once() }, expected: Expected{ - fields: nil, - err: errors.New("error", "", 0), + conflicts: []string{}, + err: NewErrUserUpdate( + &models.User{ + ID: "000000000000000000000000", + UserData: models.UserData{ + Name: "James Smith", + Username: "james_smith", + Email: "james.smith@shellhub.io", + RecoveryEmail: "recover@test.com", + }, + }, + errors.New("error", "", 0), + ), }, }, { description: "Success to update user", - id: "1", - data: models.UserData{ - Name: "test", - Username: "new", - Email: "new@test.com", + req: &requests.UpdateUser{ + UserID: "000000000000000000000000", + Name: "John Doe", + Username: "john_doe", + Email: "john.doe@test.com", + RecoveryEmail: "recovery@test.com", }, - requiredMocks: func() { + requiredMocks: func(ctx context.Context) { user := &models.User{ - ID: "1", + ID: "000000000000000000000000", UserData: models.UserData{ - Username: "test", - Email: "test@test.com", + Name: "James Smith", + Username: "james_smith", + Email: "james.smith@shellhub.io", + RecoveryEmail: "recover@test.com", }, } - - data := models.User{ + updatedUser := &models.User{ + ID: "000000000000000000000000", UserData: models.UserData{ - Name: "test", - Username: "new", - Email: "new@test.com", + Name: "John Doe", + Username: "john_doe", + Email: "john.doe@test.com", + RecoveryEmail: "recovery@test.com", }, } - mock.On("UserGetByID", ctx, "1", false).Return(user, 1, nil).Once() - mock.On("UserGetByUsername", ctx, "new").Return(nil, nil).Once() - mock.On("UserGetByEmail", ctx, "new@test.com").Return(nil, nil).Once() - mock.On("UserUpdateData", ctx, "1", data).Return(nil).Once() + storeMock. + On("UserResolve", ctx, store.UserIDResolver, "000000000000000000000000"). + Return(user, nil). + Once() + storeMock. + On("UserConflicts", ctx, &models.UserConflicts{Username: "john_doe", Email: "john.doe@test.com"}). + Return([]string{}, false, nil). + Once() + storeMock. + On("UserUpdate", ctx, updatedUser). + Return(nil). 
+ Once() }, expected: Expected{ - fields: nil, - err: nil, + conflicts: []string{}, + err: nil, }, }, } + service := NewService(store.Store(storeMock), privateKey, publicKey, storecache.NewNullCache(), clientMock) + for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - tc.requiredMocks() + ctx := context.Background() + tc.requiredMocks(ctx) - services := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) - fields, err := services.UpdateDataUser(ctx, tc.id, tc.data) - assert.Equal(t, tc.expected, Expected{fields, err}) + conflicts, err := service.UpdateUser(ctx, tc.req) + assert.Equal(t, tc.expected, Expected{conflicts, err}) }) } - mock.AssertExpectations(t) + storeMock.AssertExpectations(t) } func TestUpdatePasswordUser(t *testing.T) { @@ -251,41 +324,145 @@ func TestUpdatePasswordUser(t *testing.T) { expected error }{ { - description: "Fail when user is not found", - id: "1", + description: "fails when user is not found", + id: "65fde3a72c4c7507c7f53c43", requiredMocks: func() { - mock.On("UserGetByID", ctx, "1", false).Return(nil, 0, errors.New("error", "", 0)).Once() + mock. + On("UserResolve", ctx, store.UserIDResolver, "65fde3a72c4c7507c7f53c43"). + Return(nil, errors.New("error", "", 0)). 
+ Once() }, - expected: NewErrUserNotFound("1", errors.New("error", "", 0)), + expected: NewErrUserNotFound("65fde3a72c4c7507c7f53c43", errors.New("error", "", 0)), }, { - description: "Fail when the current password doesn't match with user's password", + description: "fails when the current password doesn't match with user's password", id: "1", - currentPassword: "password", - newPassword: "newPassword", + currentPassword: "wrong_password", + newPassword: "newSecret", requiredMocks: func() { user := &models.User{ - UserPassword: models.NewUserPassword("passwordNoMatch"), + Password: models.UserPassword{ + Hash: "$2a$10$V/6N1wsjheBVvWosVVVV2uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi", + }, } - mock.On("UserGetByID", ctx, "1", false).Return(user, 1, nil).Once() + mock. + On("UserResolve", ctx, store.UserIDResolver, "1"). + Return(user, nil). + Once() + hashMock. + On("CompareWith", "wrong_password", "$2a$10$V/6N1wsjheBVvWosVVVV2uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi"). + Return(false). + Once() }, expected: NewErrUserPasswordNotMatch(nil), }, { - description: "Fail to update user's password", - id: "1", - currentPassword: "password", - newPassword: "newPassword", + description: "fail when unable to hash the new password", + id: "65fde3a72c4c7507c7f53c43", + currentPassword: "secret", + newPassword: "newSecret", + requiredMocks: func() { + user := &models.User{ + Password: models.UserPassword{ + Plain: "secret", + Hash: "$2a$10$V/6N1wsjheBVvWosVVVV2uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi", + }, + } + + mock. + On("UserResolve", ctx, store.UserIDResolver, "65fde3a72c4c7507c7f53c43"). + Return(user, nil). + Once() + hashMock. + On("CompareWith", "secret", "$2a$10$V/6N1wsjheBVvWosVVVV2uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi"). + Return(true). + Once() + hashMock. + On("Do", "newSecret"). + Return("", errors.New("error", "", 0)). 
+ Once() + }, + expected: NewErrUserPasswordInvalid(errors.New("error", "", 0)), + }, + { + description: "fail to update the user's password", + id: "65fde3a72c4c7507c7f53c43", + currentPassword: "secret", + newPassword: "newSecret", requiredMocks: func() { user := &models.User{ - UserPassword: models.NewUserPassword("password"), + Password: models.UserPassword{ + Plain: "secret", + Hash: "$2a$10$V/6N1wsjheBVvWosVVVV2uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi", + }, } + mock. + On("UserResolve", ctx, store.UserIDResolver, "65fde3a72c4c7507c7f53c43"). + Return(user, nil). + Once() + hashMock. + On("CompareWith", "secret", "$2a$10$V/6N1wsjheBVvWosVVVV2uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi"). + Return(true). + Once() + hashMock. + On("Do", "newSecret"). + Return("$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YVVCIa2UYuFV4OJby7Yv", nil). + Once() + + expectedUser := *user + expectedUser.Password.Plain = "newSecret" + expectedUser.Password.Hash = "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YVVCIa2UYuFV4OJby7Yv" + + mock. + On("UserUpdate", ctx, &expectedUser). + Return(errors.New("error", "", 0)). + Once() + }, + expected: NewErrUserUpdate( + &models.User{ + Password: models.UserPassword{ + Plain: "newSecret", + Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YVVCIa2UYuFV4OJby7Yv", + }, + }, + errors.New("error", "", 0), + ), + }, + { + description: "succeeds to update the password", + id: "65fde3a72c4c7507c7f53c43", + currentPassword: "secret", + newPassword: "newSecret", + requiredMocks: func() { + user := &models.User{ + Password: models.UserPassword{ + Plain: "secret", + Hash: "$2a$10$V/6N1wsjheBVvWosVVVV2uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi", + }, + } + + mock. + On("UserResolve", ctx, store.UserIDResolver, "65fde3a72c4c7507c7f53c43"). + Return(user, nil). + Once() + hashMock. + On("CompareWith", "secret", "$2a$10$V/6N1wsjheBVvWosVVVV2uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi"). + Return(true). + Once() + hashMock. + On("Do", "newSecret"). 
+ Return("$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YVVCIa2UYuFV4OJby7Yv", nil). + Once() - password := models.NewUserPassword("newPassword") + expectedUser := *user + expectedUser.Password.Plain = "newSecret" + expectedUser.Password.Hash = "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YVVCIa2UYuFV4OJby7Yv" - mock.On("UserGetByID", ctx, "1", false).Return(user, 1, nil).Once() - mock.On("UserUpdatePassword", ctx, password.HashedPassword, "1").Return(nil).Once() + mock. + On("UserUpdate", ctx, &expectedUser). + Return(nil). + Once() }, expected: nil, }, @@ -295,7 +472,7 @@ func TestUpdatePasswordUser(t *testing.T) { t.Run(tc.description, func(t *testing.T) { tc.requiredMocks() - services := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock, nil) + services := NewService(store.Store(mock), privateKey, publicKey, storecache.NewNullCache(), clientMock) err := services.UpdatePasswordUser(ctx, tc.id, tc.currentPassword, tc.newPassword) assert.Equal(t, tc.expected, err) }) diff --git a/api/services/utils.go b/api/services/utils.go index fb64061069c..44bb4d90567 100644 --- a/api/services/utils.go +++ b/api/services/utils.go @@ -30,13 +30,3 @@ func LoadKeys() (*rsa.PrivateKey, *rsa.PublicKey, error) { return privKey, pubKey, nil } - -func contains(list []string, item string) bool { - for _, i := range list { - if i == item { - return true - } - } - - return false -} diff --git a/api/store/announcement.go b/api/store/announcement.go deleted file mode 100644 index 115a71d122b..00000000000 --- a/api/store/announcement.go +++ /dev/null @@ -1,17 +0,0 @@ -package store - -import ( - "context" - - "github.com/shellhub-io/shellhub/pkg/api/order" - "github.com/shellhub-io/shellhub/pkg/api/paginator" - "github.com/shellhub-io/shellhub/pkg/models" -) - -type AnnouncementsStore interface { - AnnouncementList(ctx context.Context, pagination paginator.Query, ordination order.Query) ([]models.AnnouncementShort, int, error) - AnnouncementGet(ctx 
context.Context, uuid string) (*models.Announcement, error) - AnnouncementCreate(ctx context.Context, announcement *models.Announcement) error - AnnouncementUpdate(ctx context.Context, announcement *models.Announcement) error - AnnouncementDelete(ctx context.Context, uuid string) error -} diff --git a/api/store/api_key.go b/api/store/api_key.go new file mode 100644 index 00000000000..edab7a48109 --- /dev/null +++ b/api/store/api_key.go @@ -0,0 +1,40 @@ +package store + +import ( + "context" + + "github.com/shellhub-io/shellhub/pkg/models" +) + +type APIKeyResolver uint + +const ( + APIKeyIDResolver APIKeyResolver = iota + 1 + APIKeyNameResolver +) + +type APIKeyStore interface { + // APIKeyCreate creates an API key with the provided data. Returns the inserted ID and an error if any. + APIKeyCreate(ctx context.Context, APIKey *models.APIKey) (insertedID string, err error) + + // APIKeyResolve fetches an API key using a specific resolver within a given tenant ID. + // + // It returns the resolved API key if found and an error, if any. + APIKeyResolve(ctx context.Context, resolver APIKeyResolver, value string, opts ...QueryOption) (*models.APIKey, error) + + // APIKeyConflicts reports whether the target contains conflicting attributes with the database. Pass zero values for + // attributes you do not wish to match on. It returns an array of conflicting attribute fields and an error, if any. + // + // API keys attributes can be duplicated in document level when the tenant id are different. + APIKeyConflicts(ctx context.Context, tenantID string, target *models.APIKeyConflicts) (conflicts []string, has bool, err error) + + // APIKeyList retrieves a list of API keys. + // Returns the list of API keys, the total count of matched documents, and an error if any. + APIKeyList(ctx context.Context, opts ...QueryOption) (apiKeys []models.APIKey, count int, err error) + + // APIKeyUpdate updates an API key. It returns an error if any. 
+ APIKeyUpdate(ctx context.Context, apiKey *models.APIKey) (err error) + + // APIKeyDelete deletes an API key. It returns an error if any. + APIKeyDelete(ctx context.Context, apiKey *models.APIKey) (err error) +} diff --git a/api/store/device.go b/api/store/device.go index 394adfc17bc..d6c5d866de2 100644 --- a/api/store/device.go +++ b/api/store/device.go @@ -4,43 +4,56 @@ import ( "context" "time" - "github.com/shellhub-io/shellhub/pkg/api/paginator" "github.com/shellhub-io/shellhub/pkg/models" ) -type DeviceListMode uint +type DeviceAcceptable uint const ( - DeviceListModeDefault DeviceListMode = iota + 1 - // DeviceListModeMaxDeviceReached is used to indicate to the DeviceList method that the namepsace's device maxium - // number of devices has been reached and should set the "acceptable" value to true for devices that were recently - // removed. - DeviceListModeMaxDeviceReached + // DeviceAcceptableIfNotAccepted is used to indicate the all devices not accepted will be defined as "acceptabled". + DeviceAcceptableIfNotAccepted DeviceAcceptable = iota + 1 + // DeviceAcceptableFromRemoved is used to indicate that the namepsace's device maxium number of devices has been + // reached and should set the "acceptable" value to true for devices that were recently removed. + DeviceAcceptableFromRemoved + // DeviceAcceptableAsFalse set acceptable to false to all returned devices. 
+ DeviceAcceptableAsFalse +) + +type DeviceResolver uint + +const ( + DeviceUIDResolver DeviceResolver = iota + 1 + DeviceHostnameResolver + DeviceMACResolver ) type DeviceStore interface { - DeviceList(ctx context.Context, pagination paginator.Query, filters []models.Filter, status models.DeviceStatus, sort string, order string, mode DeviceListMode) ([]models.Device, int, error) - DeviceGet(ctx context.Context, uid models.UID) (*models.Device, error) - DeviceUpdate(ctx context.Context, tenant string, uid models.UID, name *string, publicURL *bool) error - DeviceDelete(ctx context.Context, uid models.UID) error - DeviceCreate(ctx context.Context, d models.Device, hostname string) error - DeviceRename(ctx context.Context, uid models.UID, hostname string) error - DeviceLookup(ctx context.Context, namespace, hostname string) (*models.Device, error) - DeviceSetOnline(ctx context.Context, uid models.UID, timestamp time.Time, online bool) error - DeviceUpdateOnline(ctx context.Context, uid models.UID, online bool) error - DeviceUpdateLastSeen(ctx context.Context, uid models.UID, ts time.Time) error - DeviceUpdateStatus(ctx context.Context, uid models.UID, status models.DeviceStatus) error - DeviceGetByMac(ctx context.Context, mac string, tenantID string, status models.DeviceStatus) (*models.Device, error) - DeviceGetByName(ctx context.Context, name string, tenantID string, status models.DeviceStatus) (*models.Device, error) - DeviceGetByUID(ctx context.Context, uid models.UID, tenantID string) (*models.Device, error) - DeviceSetPosition(ctx context.Context, uid models.UID, position models.DevicePosition) error - DeviceListByUsage(ctx context.Context, tenantID string) ([]models.UID, error) - DeviceChooser(ctx context.Context, tenantID string, chosen []string) error - DeviceRemovedCount(ctx context.Context, tenant string) (int64, error) - DeviceRemovedGet(ctx context.Context, tenant string, uid models.UID) (*models.DeviceRemoved, error) - DeviceRemovedInsert(ctx 
context.Context, tenant string, device *models.Device) error - DeviceRemovedDelete(ctx context.Context, tenant string, uid models.UID) error - DeviceRemovedList(ctx context.Context, tenant string, pagination paginator.Query, filters []models.Filter, sort string, order string) ([]models.DeviceRemoved, int, error) - DeviceCreatePublicURLAddress(ctx context.Context, uid models.UID) error - DeviceGetByPublicURLAddress(ctx context.Context, address string) (*models.Device, error) + // DeviceCreate creates a new device. It returns the inserted UID and an error, if any. + DeviceCreate(ctx context.Context, device *models.Device) (insertedUID string, err error) + + DeviceList(ctx context.Context, acceptable DeviceAcceptable, opts ...QueryOption) ([]models.Device, int, error) + + // DeviceResolve fetches a device using a specific resolver within a given tenant ID. + // + // It returns the resolved device if found and an error, if any. + DeviceResolve(ctx context.Context, resolver DeviceResolver, value string, opts ...QueryOption) (*models.Device, error) + + // DeviceConflicts reports whether the target contains conflicting attributes with the database. Pass zero values for + // attributes you do not wish to match on. For example, the following call checks for conflicts based on email only: + // + // ctx := context.Background() + // conflicts, has, err := store.DeviceConflicts(ctx, &models.DeviceConflicts{Name: "mydevice"}) + // + // It returns an array of conflicting attribute fields and an error, if any. + DeviceConflicts(ctx context.Context, target *models.DeviceConflicts) (conflicts []string, has bool, err error) + + // DeviceUpdate updates a device. It returns [ErrNoDocuments] if none device is found. + DeviceUpdate(ctx context.Context, device *models.Device) error + // DeviceHeartbeat updates the last_seen timestamp and sets disconnected_at to nil for multiple devices. + // It returns the number of modified devices and an error if any. 
+ DeviceHeartbeat(ctx context.Context, uids []string, lastSeen time.Time) (modifiedCount int64, err error) + + DeviceDelete(ctx context.Context, device *models.Device) error + // DeviceDeleteMany deletes multiple devices by their UIDs. + DeviceDeleteMany(ctx context.Context, uids []string) (deletedCount int64, err error) } diff --git a/api/store/device_tags.go b/api/store/device_tags.go deleted file mode 100644 index 479a8483118..00000000000 --- a/api/store/device_tags.go +++ /dev/null @@ -1,33 +0,0 @@ -package store - -import ( - "context" - - "github.com/shellhub-io/shellhub/pkg/models" -) - -type DeviceTagsStore interface { - // DevicePushTag adds a new tag to the list of tags for a device with the specified UID. - // Returns an error if any issues occur during the tag addition or ErrNoDocuments when matching documents are found. - DevicePushTag(ctx context.Context, uid models.UID, tag string) error - - // DevicePullTag removes a tag from the list of tags for a device with the specified UID. - // Returns an error if any issues occur during the tag removal or ErrNoDocuments when matching documents are found. - DevicePullTag(ctx context.Context, uid models.UID, tag string) error - - // DeviceSetTags sets the tags for a device with the specified UID. - // It returns the number of matching documents, the number of modified documents, and any encountered errors. - DeviceSetTags(ctx context.Context, uid models.UID, tags []string) (matchedCount int64, updatedCount int64, err error) - - // DeviceBulkRenameTag replaces all occurrences of the old tag with the new tag for all devices belonging to the specified tenant. - // Returns the number of documents updated and an error if any issues occur during the tag renaming. - DeviceBulkRenameTag(ctx context.Context, tenant, currentTag, newTag string) (updatedCount int64, err error) - - // DeviceBulkDeleteTag removes a tag from all devices belonging to the specified tenant. 
- // Returns the number of documents updated and an error if any issues occur during the tag deletion. - DeviceBulkDeleteTag(ctx context.Context, tenant, tag string) (deletedCount int64, err error) - - // DeviceGetTags retrieves all tags associated with the tenant. - // Returns the tags, the number of tags, and an error if any issues occur. - DeviceGetTags(ctx context.Context, tenant string) (tag []string, n int, err error) -} diff --git a/api/store/firewall.go b/api/store/firewall.go deleted file mode 100644 index ceaacc46712..00000000000 --- a/api/store/firewall.go +++ /dev/null @@ -1,16 +0,0 @@ -package store - -import ( - "context" - - "github.com/shellhub-io/shellhub/pkg/api/paginator" - "github.com/shellhub-io/shellhub/pkg/models" -) - -type FirewallStore interface { - FirewallRuleList(ctx context.Context, pagination paginator.Query) ([]models.FirewallRule, int, error) - FirewallRuleCreate(ctx context.Context, rule *models.FirewallRule) error - FirewallRuleGet(ctx context.Context, id string) (*models.FirewallRule, error) - FirewallRuleUpdate(ctx context.Context, id string, rule models.FirewallRuleUpdate) (*models.FirewallRule, error) - FirewallRuleDelete(ctx context.Context, id string) error -} diff --git a/api/store/firewall_tags.go b/api/store/firewall_tags.go deleted file mode 100644 index 3bbd7f2e4d6..00000000000 --- a/api/store/firewall_tags.go +++ /dev/null @@ -1,31 +0,0 @@ -package store - -import "context" - -type FirewallTagsStore interface { - // FirewallRulePushTag adds a new tag to the list of tags for a device with the specified UID. - // Returns an error if any issues occur during the tag addition or ErrNoDocuments when matching documents are found. - // - // The tag need to exist on a device. If it is not true, the action will fail. - FirewallRulePushTag(ctx context.Context, id, tag string) error - - // FirewallRulePullTag removes a tag from the list of tags for a device with the specified UID. 
- // Returns an error if any issues occur during the tag removal or ErrNoDocuments when matching documents are found. - // - // To remove a tag, that tag needs to exist on a device. If it is not, the action will fail. - FirewallRulePullTag(ctx context.Context, id, tag string) error - - FirewallRuleSetTags(ctx context.Context, id string, tags []string) error - - // FirewallRuleBulkRenameTag replaces all occurrences of the old tag with the new tag for all firewall rules belonging to the specified tenant. - // Returns the number of documents updated and an error if any issues occur during the tag renaming. - FirewallRuleBulkRenameTag(ctx context.Context, tenant, currentTag, newTag string) (updatedCount int64, err error) - - // FirewallRuleBulkDeleteTag removes a tag from all firewall rules belonging to the specified tenant. - // Returns the number of documents updated and an error if any issues occur during the tag deletion. - FirewallRuleBulkDeleteTag(ctx context.Context, tenant, tag string) (updatedCount int64, err error) - - // FirewallRuleGetTags retrieves all tags associated with the tenant. - // Returns the tags, the number of tags, and an error if any issues occur. 
- FirewallRuleGetTags(ctx context.Context, tenant string) (tag []string, n int, err error) -} diff --git a/api/store/license.go b/api/store/license.go deleted file mode 100644 index d3e565eabf5..00000000000 --- a/api/store/license.go +++ /dev/null @@ -1,12 +0,0 @@ -package store - -import ( - "context" - - "github.com/shellhub-io/shellhub/pkg/models" -) - -type LicenseStore interface { - LicenseLoad(ctx context.Context) (*models.License, error) - LicenseSave(ctx context.Context, license *models.License) error -} diff --git a/api/store/member.go b/api/store/member.go new file mode 100644 index 00000000000..56ec61188ce --- /dev/null +++ b/api/store/member.go @@ -0,0 +1,13 @@ +package store + +import ( + "context" + + "github.com/shellhub-io/shellhub/pkg/models" +) + +type MemberStore interface { + NamespaceCreateMembership(ctx context.Context, tenantID string, member *models.Member) error + NamespaceUpdateMembership(ctx context.Context, tenantID string, member *models.Member) error + NamespaceDeleteMembership(ctx context.Context, tenantID string, member *models.Member) error +} diff --git a/api/store/membership-invitations.go b/api/store/membership-invitations.go new file mode 100644 index 00000000000..1f093773b9c --- /dev/null +++ b/api/store/membership-invitations.go @@ -0,0 +1,19 @@ +package store + +import ( + "context" + + "github.com/shellhub-io/shellhub/pkg/models" +) + +type MembershipInvitationsStore interface { + // MembershipInvitationCreate creates a new membership invitation. + MembershipInvitationCreate(ctx context.Context, invitation *models.MembershipInvitation) error + + // MembershipInvitationResolve retrieves the most recent membership invitation for the specified tenant and user. + // It returns the invitation or an error, if any. + MembershipInvitationResolve(ctx context.Context, tenantID, userID string) (*models.MembershipInvitation, error) + + // MembershipInvitationUpdate updates an existing membership invitation. 
+ MembershipInvitationUpdate(ctx context.Context, invitation *models.MembershipInvitation) error +} diff --git a/api/store/mfa_store.go b/api/store/mfa_store.go deleted file mode 100644 index 68fa94a4101..00000000000 --- a/api/store/mfa_store.go +++ /dev/null @@ -1,17 +0,0 @@ -package store - -import ( - "context" -) - -type MFAStore interface { - AddStatusMFA(ctx context.Context, username string, statusMFA bool) error - GetStatusMFA(ctx context.Context, id string) (bool, error) - AddSecret(ctx context.Context, username string, secret string) error - GetSecret(ctx context.Context, id string) (string, error) - DeleteSecret(ctx context.Context, username string) error - GetCodes(ctx context.Context, id string) ([]string, error) - AddCodes(ctx context.Context, username string, codes []string) error - UpdateCodes(ctx context.Context, id string, codes []string) error - DeleteCodes(ctx context.Context, username string) error -} diff --git a/api/store/mocks/query_options.go b/api/store/mocks/query_options.go new file mode 100644 index 00000000000..8253b5bbbe7 --- /dev/null +++ b/api/store/mocks/query_options.go @@ -0,0 +1,131 @@ +// Code generated by mockery v2.53.2. DO NOT EDIT. 
+ +package mocks + +import ( + models "github.com/shellhub-io/shellhub/pkg/models" + mock "github.com/stretchr/testify/mock" + + query "github.com/shellhub-io/shellhub/pkg/api/query" + + store "github.com/shellhub-io/shellhub/api/store" +) + +// QueryOptions is an autogenerated mock type for the QueryOptions type +type QueryOptions struct { + mock.Mock +} + +// InNamespace provides a mock function with given fields: tenantID +func (_m *QueryOptions) InNamespace(tenantID string) store.QueryOption { + ret := _m.Called(tenantID) + + if len(ret) == 0 { + panic("no return value specified for InNamespace") + } + + var r0 store.QueryOption + if rf, ok := ret.Get(0).(func(string) store.QueryOption); ok { + r0 = rf(tenantID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(store.QueryOption) + } + } + + return r0 +} + +// Match provides a mock function with given fields: fs +func (_m *QueryOptions) Match(fs *query.Filters) store.QueryOption { + ret := _m.Called(fs) + + if len(ret) == 0 { + panic("no return value specified for Match") + } + + var r0 store.QueryOption + if rf, ok := ret.Get(0).(func(*query.Filters) store.QueryOption); ok { + r0 = rf(fs) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(store.QueryOption) + } + } + + return r0 +} + +// Paginate provides a mock function with given fields: paginator +func (_m *QueryOptions) Paginate(paginator *query.Paginator) store.QueryOption { + ret := _m.Called(paginator) + + if len(ret) == 0 { + panic("no return value specified for Paginate") + } + + var r0 store.QueryOption + if rf, ok := ret.Get(0).(func(*query.Paginator) store.QueryOption); ok { + r0 = rf(paginator) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(store.QueryOption) + } + } + + return r0 +} + +// Sort provides a mock function with given fields: sorter +func (_m *QueryOptions) Sort(sorter *query.Sorter) store.QueryOption { + ret := _m.Called(sorter) + + if len(ret) == 0 { + panic("no return value specified for Sort") + } + + var r0 
store.QueryOption + if rf, ok := ret.Get(0).(func(*query.Sorter) store.QueryOption); ok { + r0 = rf(sorter) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(store.QueryOption) + } + } + + return r0 +} + +// WithDeviceStatus provides a mock function with given fields: _a0 +func (_m *QueryOptions) WithDeviceStatus(_a0 models.DeviceStatus) store.QueryOption { + ret := _m.Called(_a0) + + if len(ret) == 0 { + panic("no return value specified for WithDeviceStatus") + } + + var r0 store.QueryOption + if rf, ok := ret.Get(0).(func(models.DeviceStatus) store.QueryOption); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(store.QueryOption) + } + } + + return r0 +} + +// NewQueryOptions creates a new instance of QueryOptions. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewQueryOptions(t interface { + mock.TestingT + Cleanup(func()) +}) *QueryOptions { + mock := &QueryOptions{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/api/store/mocks/store.go b/api/store/mocks/store.go index 4a8b3bd6959..41e3f81226c 100644 --- a/api/store/mocks/store.go +++ b/api/store/mocks/store.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.32.4. DO NOT EDIT. +// Code generated by mockery v2.53.3. DO NOT EDIT. 
package mocks @@ -8,10 +8,6 @@ import ( models "github.com/shellhub-io/shellhub/pkg/models" mock "github.com/stretchr/testify/mock" - order "github.com/shellhub-io/shellhub/pkg/api/order" - - paginator "github.com/shellhub-io/shellhub/pkg/api/paginator" - store "github.com/shellhub-io/shellhub/api/store" time "time" @@ -22,69 +18,82 @@ type Store struct { mock.Mock } -// AddCodes provides a mock function with given fields: ctx, username, codes -func (_m *Store) AddCodes(ctx context.Context, username string, codes []string) error { - ret := _m.Called(ctx, username, codes) +// APIKeyConflicts provides a mock function with given fields: ctx, tenantID, target +func (_m *Store) APIKeyConflicts(ctx context.Context, tenantID string, target *models.APIKeyConflicts) ([]string, bool, error) { + ret := _m.Called(ctx, tenantID, target) - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, []string) error); ok { - r0 = rf(ctx, username, codes) - } else { - r0 = ret.Error(0) + if len(ret) == 0 { + panic("no return value specified for APIKeyConflicts") } - return r0 -} + var r0 []string + var r1 bool + var r2 error + if rf, ok := ret.Get(0).(func(context.Context, string, *models.APIKeyConflicts) ([]string, bool, error)); ok { + return rf(ctx, tenantID, target) + } + if rf, ok := ret.Get(0).(func(context.Context, string, *models.APIKeyConflicts) []string); ok { + r0 = rf(ctx, tenantID, target) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]string) + } + } -// AddSecret provides a mock function with given fields: ctx, username, secret -func (_m *Store) AddSecret(ctx context.Context, username string, secret string) error { - ret := _m.Called(ctx, username, secret) + if rf, ok := ret.Get(1).(func(context.Context, string, *models.APIKeyConflicts) bool); ok { + r1 = rf(ctx, tenantID, target) + } else { + r1 = ret.Get(1).(bool) + } - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, string) error); ok { - r0 = rf(ctx, username, secret) + 
if rf, ok := ret.Get(2).(func(context.Context, string, *models.APIKeyConflicts) error); ok { + r2 = rf(ctx, tenantID, target) } else { - r0 = ret.Error(0) + r2 = ret.Error(2) } - return r0 + return r0, r1, r2 } -// AddStatusMFA provides a mock function with given fields: ctx, username, statusMFA -func (_m *Store) AddStatusMFA(ctx context.Context, username string, statusMFA bool) error { - ret := _m.Called(ctx, username, statusMFA) +// APIKeyCreate provides a mock function with given fields: ctx, APIKey +func (_m *Store) APIKeyCreate(ctx context.Context, APIKey *models.APIKey) (string, error) { + ret := _m.Called(ctx, APIKey) - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, bool) error); ok { - r0 = rf(ctx, username, statusMFA) - } else { - r0 = ret.Error(0) + if len(ret) == 0 { + panic("no return value specified for APIKeyCreate") } - return r0 -} - -// AnnouncementCreate provides a mock function with given fields: ctx, announcement -func (_m *Store) AnnouncementCreate(ctx context.Context, announcement *models.Announcement) error { - ret := _m.Called(ctx, announcement) + var r0 string + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *models.APIKey) (string, error)); ok { + return rf(ctx, APIKey) + } + if rf, ok := ret.Get(0).(func(context.Context, *models.APIKey) string); ok { + r0 = rf(ctx, APIKey) + } else { + r0 = ret.Get(0).(string) + } - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Announcement) error); ok { - r0 = rf(ctx, announcement) + if rf, ok := ret.Get(1).(func(context.Context, *models.APIKey) error); ok { + r1 = rf(ctx, APIKey) } else { - r0 = ret.Error(0) + r1 = ret.Error(1) } - return r0 + return r0, r1 } -// AnnouncementDelete provides a mock function with given fields: ctx, uuid -func (_m *Store) AnnouncementDelete(ctx context.Context, uuid string) error { - ret := _m.Called(ctx, uuid) +// APIKeyDelete provides a mock function with given fields: ctx, apiKey +func (_m *Store) 
APIKeyDelete(ctx context.Context, apiKey *models.APIKey) error { + ret := _m.Called(ctx, apiKey) + + if len(ret) == 0 { + panic("no return value specified for APIKeyDelete") + } var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { - r0 = rf(ctx, uuid) + if rf, ok := ret.Get(0).(func(context.Context, *models.APIKey) error); ok { + r0 = rf(ctx, apiKey) } else { r0 = ret.Error(0) } @@ -92,58 +101,43 @@ func (_m *Store) AnnouncementDelete(ctx context.Context, uuid string) error { return r0 } -// AnnouncementGet provides a mock function with given fields: ctx, uuid -func (_m *Store) AnnouncementGet(ctx context.Context, uuid string) (*models.Announcement, error) { - ret := _m.Called(ctx, uuid) - - var r0 *models.Announcement - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) (*models.Announcement, error)); ok { - return rf(ctx, uuid) - } - if rf, ok := ret.Get(0).(func(context.Context, string) *models.Announcement); ok { - r0 = rf(ctx, uuid) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.Announcement) - } +// APIKeyList provides a mock function with given fields: ctx, opts +func (_m *Store) APIKeyList(ctx context.Context, opts ...store.QueryOption) ([]models.APIKey, int, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] } + var _ca []interface{} + _ca = append(_ca, ctx) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) 
- if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, uuid) - } else { - r1 = ret.Error(1) + if len(ret) == 0 { + panic("no return value specified for APIKeyList") } - return r0, r1 -} - -// AnnouncementList provides a mock function with given fields: ctx, pagination, ordination -func (_m *Store) AnnouncementList(ctx context.Context, pagination paginator.Query, ordination order.Query) ([]models.AnnouncementShort, int, error) { - ret := _m.Called(ctx, pagination, ordination) - - var r0 []models.AnnouncementShort + var r0 []models.APIKey var r1 int var r2 error - if rf, ok := ret.Get(0).(func(context.Context, paginator.Query, order.Query) ([]models.AnnouncementShort, int, error)); ok { - return rf(ctx, pagination, ordination) + if rf, ok := ret.Get(0).(func(context.Context, ...store.QueryOption) ([]models.APIKey, int, error)); ok { + return rf(ctx, opts...) } - if rf, ok := ret.Get(0).(func(context.Context, paginator.Query, order.Query) []models.AnnouncementShort); ok { - r0 = rf(ctx, pagination, ordination) + if rf, ok := ret.Get(0).(func(context.Context, ...store.QueryOption) []models.APIKey); ok { + r0 = rf(ctx, opts...) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).([]models.AnnouncementShort) + r0 = ret.Get(0).([]models.APIKey) } } - if rf, ok := ret.Get(1).(func(context.Context, paginator.Query, order.Query) int); ok { - r1 = rf(ctx, pagination, ordination) + if rf, ok := ret.Get(1).(func(context.Context, ...store.QueryOption) int); ok { + r1 = rf(ctx, opts...) } else { r1 = ret.Get(1).(int) } - if rf, ok := ret.Get(2).(func(context.Context, paginator.Query, order.Query) error); ok { - r2 = rf(ctx, pagination, ordination) + if rf, ok := ret.Get(2).(func(context.Context, ...store.QueryOption) error); ok { + r2 = rf(ctx, opts...) 
} else { r2 = ret.Error(2) } @@ -151,65 +145,36 @@ func (_m *Store) AnnouncementList(ctx context.Context, pagination paginator.Quer return r0, r1, r2 } -// AnnouncementUpdate provides a mock function with given fields: ctx, announcement -func (_m *Store) AnnouncementUpdate(ctx context.Context, announcement *models.Announcement) error { - ret := _m.Called(ctx, announcement) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Announcement) error); ok { - r0 = rf(ctx, announcement) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// DeleteCodes provides a mock function with given fields: ctx, username -func (_m *Store) DeleteCodes(ctx context.Context, username string) error { - ret := _m.Called(ctx, username) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { - r0 = rf(ctx, username) - } else { - r0 = ret.Error(0) +// APIKeyResolve provides a mock function with given fields: ctx, resolver, value, opts +func (_m *Store) APIKeyResolve(ctx context.Context, resolver store.APIKeyResolver, value string, opts ...store.QueryOption) (*models.APIKey, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] } + var _ca []interface{} + _ca = append(_ca, ctx, resolver, value) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) 
- return r0 -} - -// DeleteSecret provides a mock function with given fields: ctx, username -func (_m *Store) DeleteSecret(ctx context.Context, username string) error { - ret := _m.Called(ctx, username) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { - r0 = rf(ctx, username) - } else { - r0 = ret.Error(0) + if len(ret) == 0 { + panic("no return value specified for APIKeyResolve") } - return r0 -} - -// DeviceBulkDeleteTag provides a mock function with given fields: ctx, tenant, tag -func (_m *Store) DeviceBulkDeleteTag(ctx context.Context, tenant string, tag string) (int64, error) { - ret := _m.Called(ctx, tenant, tag) - - var r0 int64 + var r0 *models.APIKey var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, string) (int64, error)); ok { - return rf(ctx, tenant, tag) + if rf, ok := ret.Get(0).(func(context.Context, store.APIKeyResolver, string, ...store.QueryOption) (*models.APIKey, error)); ok { + return rf(ctx, resolver, value, opts...) } - if rf, ok := ret.Get(0).(func(context.Context, string, string) int64); ok { - r0 = rf(ctx, tenant, tag) + if rf, ok := ret.Get(0).(func(context.Context, store.APIKeyResolver, string, ...store.QueryOption) *models.APIKey); ok { + r0 = rf(ctx, resolver, value, opts...) } else { - r0 = ret.Get(0).(int64) + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.APIKey) + } } - if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok { - r1 = rf(ctx, tenant, tag) + if rf, ok := ret.Get(1).(func(context.Context, store.APIKeyResolver, string, ...store.QueryOption) error); ok { + r1 = rf(ctx, resolver, value, opts...) 
} else { r1 = ret.Error(1) } @@ -217,37 +182,17 @@ func (_m *Store) DeviceBulkDeleteTag(ctx context.Context, tenant string, tag str return r0, r1 } -// DeviceBulkRenameTag provides a mock function with given fields: ctx, tenant, currentTag, newTag -func (_m *Store) DeviceBulkRenameTag(ctx context.Context, tenant string, currentTag string, newTag string) (int64, error) { - ret := _m.Called(ctx, tenant, currentTag, newTag) - - var r0 int64 - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, string, string) (int64, error)); ok { - return rf(ctx, tenant, currentTag, newTag) - } - if rf, ok := ret.Get(0).(func(context.Context, string, string, string) int64); ok { - r0 = rf(ctx, tenant, currentTag, newTag) - } else { - r0 = ret.Get(0).(int64) - } +// APIKeyUpdate provides a mock function with given fields: ctx, apiKey +func (_m *Store) APIKeyUpdate(ctx context.Context, apiKey *models.APIKey) error { + ret := _m.Called(ctx, apiKey) - if rf, ok := ret.Get(1).(func(context.Context, string, string, string) error); ok { - r1 = rf(ctx, tenant, currentTag, newTag) - } else { - r1 = ret.Error(1) + if len(ret) == 0 { + panic("no return value specified for APIKeyUpdate") } - return r0, r1 -} - -// DeviceChooser provides a mock function with given fields: ctx, tenantID, chosen -func (_m *Store) DeviceChooser(ctx context.Context, tenantID string, chosen []string) error { - ret := _m.Called(ctx, tenantID, chosen) - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, []string) error); ok { - r0 = rf(ctx, tenantID, chosen) + if rf, ok := ret.Get(0).(func(context.Context, *models.APIKey) error); ok { + r0 = rf(ctx, apiKey) } else { r0 = ret.Error(0) } @@ -255,27 +200,17 @@ func (_m *Store) DeviceChooser(ctx context.Context, tenantID string, chosen []st return r0 } -// DeviceCreate provides a mock function with given fields: ctx, d, hostname -func (_m *Store) DeviceCreate(ctx context.Context, d models.Device, hostname string) error { - ret := 
_m.Called(ctx, d, hostname) +// ActiveSessionCreate provides a mock function with given fields: ctx, session +func (_m *Store) ActiveSessionCreate(ctx context.Context, session *models.Session) error { + ret := _m.Called(ctx, session) - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, models.Device, string) error); ok { - r0 = rf(ctx, d, hostname) - } else { - r0 = ret.Error(0) + if len(ret) == 0 { + panic("no return value specified for ActiveSessionCreate") } - return r0 -} - -// DeviceCreatePublicURLAddress provides a mock function with given fields: ctx, uid -func (_m *Store) DeviceCreatePublicURLAddress(ctx context.Context, uid models.UID) error { - ret := _m.Called(ctx, uid) - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, models.UID) error); ok { - r0 = rf(ctx, uid) + if rf, ok := ret.Get(0).(func(context.Context, *models.Session) error); ok { + r0 = rf(ctx, session) } else { r0 = ret.Error(0) } @@ -283,10 +218,14 @@ func (_m *Store) DeviceCreatePublicURLAddress(ctx context.Context, uid models.UI return r0 } -// DeviceDelete provides a mock function with given fields: ctx, uid -func (_m *Store) DeviceDelete(ctx context.Context, uid models.UID) error { +// ActiveSessionDelete provides a mock function with given fields: ctx, uid +func (_m *Store) ActiveSessionDelete(ctx context.Context, uid models.UID) error { ret := _m.Called(ctx, uid) + if len(ret) == 0 { + panic("no return value specified for ActiveSessionDelete") + } + var r0 error if rf, ok := ret.Get(0).(func(context.Context, models.UID) error); ok { r0 = rf(ctx, uid) @@ -297,25 +236,29 @@ func (_m *Store) DeviceDelete(ctx context.Context, uid models.UID) error { return r0 } -// DeviceGet provides a mock function with given fields: ctx, uid -func (_m *Store) DeviceGet(ctx context.Context, uid models.UID) (*models.Device, error) { - ret := _m.Called(ctx, uid) +// ActiveSessionResolve provides a mock function with given fields: ctx, resolver, value +func (_m *Store) 
ActiveSessionResolve(ctx context.Context, resolver store.SessionResolver, value string) (*models.ActiveSession, error) { + ret := _m.Called(ctx, resolver, value) - var r0 *models.Device + if len(ret) == 0 { + panic("no return value specified for ActiveSessionResolve") + } + + var r0 *models.ActiveSession var r1 error - if rf, ok := ret.Get(0).(func(context.Context, models.UID) (*models.Device, error)); ok { - return rf(ctx, uid) + if rf, ok := ret.Get(0).(func(context.Context, store.SessionResolver, string) (*models.ActiveSession, error)); ok { + return rf(ctx, resolver, value) } - if rf, ok := ret.Get(0).(func(context.Context, models.UID) *models.Device); ok { - r0 = rf(ctx, uid) + if rf, ok := ret.Get(0).(func(context.Context, store.SessionResolver, string) *models.ActiveSession); ok { + r0 = rf(ctx, resolver, value) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.Device) + r0 = ret.Get(0).(*models.ActiveSession) } } - if rf, ok := ret.Get(1).(func(context.Context, models.UID) error); ok { - r1 = rf(ctx, uid) + if rf, ok := ret.Get(1).(func(context.Context, store.SessionResolver, string) error); ok { + r1 = rf(ctx, resolver, value) } else { r1 = ret.Error(1) } @@ -323,77 +266,82 @@ func (_m *Store) DeviceGet(ctx context.Context, uid models.UID) (*models.Device, return r0, r1 } -// DeviceGetByMac provides a mock function with given fields: ctx, mac, tenantID, status -func (_m *Store) DeviceGetByMac(ctx context.Context, mac string, tenantID string, status models.DeviceStatus) (*models.Device, error) { - ret := _m.Called(ctx, mac, tenantID, status) +// ActiveSessionUpdate provides a mock function with given fields: ctx, activeSession +func (_m *Store) ActiveSessionUpdate(ctx context.Context, activeSession *models.ActiveSession) error { + ret := _m.Called(ctx, activeSession) - var r0 *models.Device - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, string, models.DeviceStatus) (*models.Device, error)); ok { - return rf(ctx, mac, 
tenantID, status) - } - if rf, ok := ret.Get(0).(func(context.Context, string, string, models.DeviceStatus) *models.Device); ok { - r0 = rf(ctx, mac, tenantID, status) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.Device) - } + if len(ret) == 0 { + panic("no return value specified for ActiveSessionUpdate") } - if rf, ok := ret.Get(1).(func(context.Context, string, string, models.DeviceStatus) error); ok { - r1 = rf(ctx, mac, tenantID, status) + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *models.ActiveSession) error); ok { + r0 = rf(ctx, activeSession) } else { - r1 = ret.Error(1) + r0 = ret.Error(0) } - return r0, r1 + return r0 } -// DeviceGetByName provides a mock function with given fields: ctx, name, tenantID, status -func (_m *Store) DeviceGetByName(ctx context.Context, name string, tenantID string, status models.DeviceStatus) (*models.Device, error) { - ret := _m.Called(ctx, name, tenantID, status) +// DeviceConflicts provides a mock function with given fields: ctx, target +func (_m *Store) DeviceConflicts(ctx context.Context, target *models.DeviceConflicts) ([]string, bool, error) { + ret := _m.Called(ctx, target) - var r0 *models.Device - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, string, models.DeviceStatus) (*models.Device, error)); ok { - return rf(ctx, name, tenantID, status) + if len(ret) == 0 { + panic("no return value specified for DeviceConflicts") + } + + var r0 []string + var r1 bool + var r2 error + if rf, ok := ret.Get(0).(func(context.Context, *models.DeviceConflicts) ([]string, bool, error)); ok { + return rf(ctx, target) } - if rf, ok := ret.Get(0).(func(context.Context, string, string, models.DeviceStatus) *models.Device); ok { - r0 = rf(ctx, name, tenantID, status) + if rf, ok := ret.Get(0).(func(context.Context, *models.DeviceConflicts) []string); ok { + r0 = rf(ctx, target) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.Device) + r0 = ret.Get(0).([]string) } } - if 
rf, ok := ret.Get(1).(func(context.Context, string, string, models.DeviceStatus) error); ok { - r1 = rf(ctx, name, tenantID, status) + if rf, ok := ret.Get(1).(func(context.Context, *models.DeviceConflicts) bool); ok { + r1 = rf(ctx, target) } else { - r1 = ret.Error(1) + r1 = ret.Get(1).(bool) } - return r0, r1 + if rf, ok := ret.Get(2).(func(context.Context, *models.DeviceConflicts) error); ok { + r2 = rf(ctx, target) + } else { + r2 = ret.Error(2) + } + + return r0, r1, r2 } -// DeviceGetByPublicURLAddress provides a mock function with given fields: ctx, address -func (_m *Store) DeviceGetByPublicURLAddress(ctx context.Context, address string) (*models.Device, error) { - ret := _m.Called(ctx, address) +// DeviceCreate provides a mock function with given fields: ctx, device +func (_m *Store) DeviceCreate(ctx context.Context, device *models.Device) (string, error) { + ret := _m.Called(ctx, device) - var r0 *models.Device + if len(ret) == 0 { + panic("no return value specified for DeviceCreate") + } + + var r0 string var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) (*models.Device, error)); ok { - return rf(ctx, address) + if rf, ok := ret.Get(0).(func(context.Context, *models.Device) (string, error)); ok { + return rf(ctx, device) } - if rf, ok := ret.Get(0).(func(context.Context, string) *models.Device); ok { - r0 = rf(ctx, address) + if rf, ok := ret.Get(0).(func(context.Context, *models.Device) string); ok { + r0 = rf(ctx, device) } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.Device) - } + r0 = ret.Get(0).(string) } - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, address) + if rf, ok := ret.Get(1).(func(context.Context, *models.Device) error); ok { + r1 = rf(ctx, device) } else { r1 = ret.Error(1) } @@ -401,25 +349,45 @@ func (_m *Store) DeviceGetByPublicURLAddress(ctx context.Context, address string return r0, r1 } -// DeviceGetByUID provides a mock function with given fields: ctx, uid, 
tenantID -func (_m *Store) DeviceGetByUID(ctx context.Context, uid models.UID, tenantID string) (*models.Device, error) { - ret := _m.Called(ctx, uid, tenantID) +// DeviceDelete provides a mock function with given fields: ctx, device +func (_m *Store) DeviceDelete(ctx context.Context, device *models.Device) error { + ret := _m.Called(ctx, device) - var r0 *models.Device + if len(ret) == 0 { + panic("no return value specified for DeviceDelete") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *models.Device) error); ok { + r0 = rf(ctx, device) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// DeviceDeleteMany provides a mock function with given fields: ctx, uids +func (_m *Store) DeviceDeleteMany(ctx context.Context, uids []string) (int64, error) { + ret := _m.Called(ctx, uids) + + if len(ret) == 0 { + panic("no return value specified for DeviceDeleteMany") + } + + var r0 int64 var r1 error - if rf, ok := ret.Get(0).(func(context.Context, models.UID, string) (*models.Device, error)); ok { - return rf(ctx, uid, tenantID) + if rf, ok := ret.Get(0).(func(context.Context, []string) (int64, error)); ok { + return rf(ctx, uids) } - if rf, ok := ret.Get(0).(func(context.Context, models.UID, string) *models.Device); ok { - r0 = rf(ctx, uid, tenantID) + if rf, ok := ret.Get(0).(func(context.Context, []string) int64); ok { + r0 = rf(ctx, uids) } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.Device) - } + r0 = ret.Get(0).(int64) } - if rf, ok := ret.Get(1).(func(context.Context, models.UID, string) error); ok { - r1 = rf(ctx, uid, tenantID) + if rf, ok := ret.Get(1).(func(context.Context, []string) error); ok { + r1 = rf(ctx, uids) } else { r1 = ret.Error(1) } @@ -427,65 +395,71 @@ func (_m *Store) DeviceGetByUID(ctx context.Context, uid models.UID, tenantID st return r0, r1 } -// DeviceGetTags provides a mock function with given fields: ctx, tenant -func (_m *Store) DeviceGetTags(ctx context.Context, tenant string) ([]string, int, 
error) { - ret := _m.Called(ctx, tenant) +// DeviceHeartbeat provides a mock function with given fields: ctx, uids, lastSeen +func (_m *Store) DeviceHeartbeat(ctx context.Context, uids []string, lastSeen time.Time) (int64, error) { + ret := _m.Called(ctx, uids, lastSeen) - var r0 []string - var r1 int - var r2 error - if rf, ok := ret.Get(0).(func(context.Context, string) ([]string, int, error)); ok { - return rf(ctx, tenant) - } - if rf, ok := ret.Get(0).(func(context.Context, string) []string); ok { - r0 = rf(ctx, tenant) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]string) - } + if len(ret) == 0 { + panic("no return value specified for DeviceHeartbeat") } - if rf, ok := ret.Get(1).(func(context.Context, string) int); ok { - r1 = rf(ctx, tenant) + var r0 int64 + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, []string, time.Time) (int64, error)); ok { + return rf(ctx, uids, lastSeen) + } + if rf, ok := ret.Get(0).(func(context.Context, []string, time.Time) int64); ok { + r0 = rf(ctx, uids, lastSeen) } else { - r1 = ret.Get(1).(int) + r0 = ret.Get(0).(int64) } - if rf, ok := ret.Get(2).(func(context.Context, string) error); ok { - r2 = rf(ctx, tenant) + if rf, ok := ret.Get(1).(func(context.Context, []string, time.Time) error); ok { + r1 = rf(ctx, uids, lastSeen) } else { - r2 = ret.Error(2) + r1 = ret.Error(1) } - return r0, r1, r2 + return r0, r1 } -// DeviceList provides a mock function with given fields: ctx, pagination, filters, status, sort, _a5, mode -func (_m *Store) DeviceList(ctx context.Context, pagination paginator.Query, filters []models.Filter, status models.DeviceStatus, sort string, _a5 string, mode store.DeviceListMode) ([]models.Device, int, error) { - ret := _m.Called(ctx, pagination, filters, status, sort, _a5, mode) +// DeviceList provides a mock function with given fields: ctx, acceptable, opts +func (_m *Store) DeviceList(ctx context.Context, acceptable store.DeviceAcceptable, opts ...store.QueryOption) 
([]models.Device, int, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, acceptable) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for DeviceList") + } var r0 []models.Device var r1 int var r2 error - if rf, ok := ret.Get(0).(func(context.Context, paginator.Query, []models.Filter, models.DeviceStatus, string, string, store.DeviceListMode) ([]models.Device, int, error)); ok { - return rf(ctx, pagination, filters, status, sort, _a5, mode) + if rf, ok := ret.Get(0).(func(context.Context, store.DeviceAcceptable, ...store.QueryOption) ([]models.Device, int, error)); ok { + return rf(ctx, acceptable, opts...) } - if rf, ok := ret.Get(0).(func(context.Context, paginator.Query, []models.Filter, models.DeviceStatus, string, string, store.DeviceListMode) []models.Device); ok { - r0 = rf(ctx, pagination, filters, status, sort, _a5, mode) + if rf, ok := ret.Get(0).(func(context.Context, store.DeviceAcceptable, ...store.QueryOption) []models.Device); ok { + r0 = rf(ctx, acceptable, opts...) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]models.Device) } } - if rf, ok := ret.Get(1).(func(context.Context, paginator.Query, []models.Filter, models.DeviceStatus, string, string, store.DeviceListMode) int); ok { - r1 = rf(ctx, pagination, filters, status, sort, _a5, mode) + if rf, ok := ret.Get(1).(func(context.Context, store.DeviceAcceptable, ...store.QueryOption) int); ok { + r1 = rf(ctx, acceptable, opts...) } else { r1 = ret.Get(1).(int) } - if rf, ok := ret.Get(2).(func(context.Context, paginator.Query, []models.Filter, models.DeviceStatus, string, string, store.DeviceListMode) error); ok { - r2 = rf(ctx, pagination, filters, status, sort, _a5, mode) + if rf, ok := ret.Get(2).(func(context.Context, store.DeviceAcceptable, ...store.QueryOption) error); ok { + r2 = rf(ctx, acceptable, opts...) 
} else { r2 = ret.Error(2) } @@ -493,51 +467,36 @@ func (_m *Store) DeviceList(ctx context.Context, pagination paginator.Query, fil return r0, r1, r2 } -// DeviceListByUsage provides a mock function with given fields: ctx, tenantID -func (_m *Store) DeviceListByUsage(ctx context.Context, tenantID string) ([]models.UID, error) { - ret := _m.Called(ctx, tenantID) - - var r0 []models.UID - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) ([]models.UID, error)); ok { - return rf(ctx, tenantID) - } - if rf, ok := ret.Get(0).(func(context.Context, string) []models.UID); ok { - r0 = rf(ctx, tenantID) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]models.UID) - } +// DeviceResolve provides a mock function with given fields: ctx, resolver, value, opts +func (_m *Store) DeviceResolve(ctx context.Context, resolver store.DeviceResolver, value string, opts ...store.QueryOption) (*models.Device, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] } + var _ca []interface{} + _ca = append(_ca, ctx, resolver, value) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, tenantID) - } else { - r1 = ret.Error(1) + if len(ret) == 0 { + panic("no return value specified for DeviceResolve") } - return r0, r1 -} - -// DeviceLookup provides a mock function with given fields: ctx, namespace, hostname -func (_m *Store) DeviceLookup(ctx context.Context, namespace string, hostname string) (*models.Device, error) { - ret := _m.Called(ctx, namespace, hostname) - var r0 *models.Device var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, string) (*models.Device, error)); ok { - return rf(ctx, namespace, hostname) + if rf, ok := ret.Get(0).(func(context.Context, store.DeviceResolver, string, ...store.QueryOption) (*models.Device, error)); ok { + return rf(ctx, resolver, value, opts...) 
} - if rf, ok := ret.Get(0).(func(context.Context, string, string) *models.Device); ok { - r0 = rf(ctx, namespace, hostname) + if rf, ok := ret.Get(0).(func(context.Context, store.DeviceResolver, string, ...store.QueryOption) *models.Device); ok { + r0 = rf(ctx, resolver, value, opts...) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*models.Device) } } - if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok { - r1 = rf(ctx, namespace, hostname) + if rf, ok := ret.Get(1).(func(context.Context, store.DeviceResolver, string, ...store.QueryOption) error); ok { + r1 = rf(ctx, resolver, value, opts...) } else { r1 = ret.Error(1) } @@ -545,27 +504,17 @@ func (_m *Store) DeviceLookup(ctx context.Context, namespace string, hostname st return r0, r1 } -// DevicePullTag provides a mock function with given fields: ctx, uid, tag -func (_m *Store) DevicePullTag(ctx context.Context, uid models.UID, tag string) error { - ret := _m.Called(ctx, uid, tag) +// DeviceUpdate provides a mock function with given fields: ctx, device +func (_m *Store) DeviceUpdate(ctx context.Context, device *models.Device) error { + ret := _m.Called(ctx, device) - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, models.UID, string) error); ok { - r0 = rf(ctx, uid, tag) - } else { - r0 = ret.Error(0) + if len(ret) == 0 { + panic("no return value specified for DeviceUpdate") } - return r0 -} - -// DevicePushTag provides a mock function with given fields: ctx, uid, tag -func (_m *Store) DevicePushTag(ctx context.Context, uid models.UID, tag string) error { - ret := _m.Called(ctx, uid, tag) - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, models.UID, string) error); ok { - r0 = rf(ctx, uid, tag) + if rf, ok := ret.Get(0).(func(context.Context, *models.Device) error); ok { + r0 = rf(ctx, device) } else { r0 = ret.Error(0) } @@ -573,23 +522,29 @@ func (_m *Store) DevicePushTag(ctx context.Context, uid models.UID, tag string) return r0 } -// DeviceRemovedCount 
provides a mock function with given fields: ctx, tenant -func (_m *Store) DeviceRemovedCount(ctx context.Context, tenant string) (int64, error) { - ret := _m.Called(ctx, tenant) +// GetStats provides a mock function with given fields: ctx, tenantID +func (_m *Store) GetStats(ctx context.Context, tenantID string) (*models.Stats, error) { + ret := _m.Called(ctx, tenantID) - var r0 int64 + if len(ret) == 0 { + panic("no return value specified for GetStats") + } + + var r0 *models.Stats var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) (int64, error)); ok { - return rf(ctx, tenant) + if rf, ok := ret.Get(0).(func(context.Context, string) (*models.Stats, error)); ok { + return rf(ctx, tenantID) } - if rf, ok := ret.Get(0).(func(context.Context, string) int64); ok { - r0 = rf(ctx, tenant) + if rf, ok := ret.Get(0).(func(context.Context, string) *models.Stats); ok { + r0 = rf(ctx, tenantID) } else { - r0 = ret.Get(0).(int64) + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Stats) + } } if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, tenant) + r1 = rf(ctx, tenantID) } else { r1 = ret.Error(1) } @@ -597,13 +552,17 @@ func (_m *Store) DeviceRemovedCount(ctx context.Context, tenant string) (int64, return r0, r1 } -// DeviceRemovedDelete provides a mock function with given fields: ctx, tenant, uid -func (_m *Store) DeviceRemovedDelete(ctx context.Context, tenant string, uid models.UID) error { - ret := _m.Called(ctx, tenant, uid) +// MembershipInvitationCreate provides a mock function with given fields: ctx, invitation +func (_m *Store) MembershipInvitationCreate(ctx context.Context, invitation *models.MembershipInvitation) error { + ret := _m.Called(ctx, invitation) + + if len(ret) == 0 { + panic("no return value specified for MembershipInvitationCreate") + } var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, models.UID) error); ok { - r0 = rf(ctx, tenant, uid) + if rf, ok := 
ret.Get(0).(func(context.Context, *models.MembershipInvitation) error); ok { + r0 = rf(ctx, invitation) } else { r0 = ret.Error(0) } @@ -611,25 +570,29 @@ func (_m *Store) DeviceRemovedDelete(ctx context.Context, tenant string, uid mod return r0 } -// DeviceRemovedGet provides a mock function with given fields: ctx, tenant, uid -func (_m *Store) DeviceRemovedGet(ctx context.Context, tenant string, uid models.UID) (*models.DeviceRemoved, error) { - ret := _m.Called(ctx, tenant, uid) +// MembershipInvitationResolve provides a mock function with given fields: ctx, tenantID, userID +func (_m *Store) MembershipInvitationResolve(ctx context.Context, tenantID string, userID string) (*models.MembershipInvitation, error) { + ret := _m.Called(ctx, tenantID, userID) + + if len(ret) == 0 { + panic("no return value specified for MembershipInvitationResolve") + } - var r0 *models.DeviceRemoved + var r0 *models.MembershipInvitation var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, models.UID) (*models.DeviceRemoved, error)); ok { - return rf(ctx, tenant, uid) + if rf, ok := ret.Get(0).(func(context.Context, string, string) (*models.MembershipInvitation, error)); ok { + return rf(ctx, tenantID, userID) } - if rf, ok := ret.Get(0).(func(context.Context, string, models.UID) *models.DeviceRemoved); ok { - r0 = rf(ctx, tenant, uid) + if rf, ok := ret.Get(0).(func(context.Context, string, string) *models.MembershipInvitation); ok { + r0 = rf(ctx, tenantID, userID) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.DeviceRemoved) + r0 = ret.Get(0).(*models.MembershipInvitation) } } - if rf, ok := ret.Get(1).(func(context.Context, string, models.UID) error); ok { - r1 = rf(ctx, tenant, uid) + if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok { + r1 = rf(ctx, tenantID, userID) } else { r1 = ret.Error(1) } @@ -637,13 +600,17 @@ func (_m *Store) DeviceRemovedGet(ctx context.Context, tenant string, uid models return r0, r1 } -// 
DeviceRemovedInsert provides a mock function with given fields: ctx, tenant, device -func (_m *Store) DeviceRemovedInsert(ctx context.Context, tenant string, device *models.Device) error { - ret := _m.Called(ctx, tenant, device) +// MembershipInvitationUpdate provides a mock function with given fields: ctx, invitation +func (_m *Store) MembershipInvitationUpdate(ctx context.Context, invitation *models.MembershipInvitation) error { + ret := _m.Called(ctx, invitation) + + if len(ret) == 0 { + panic("no return value specified for MembershipInvitationUpdate") + } var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, *models.Device) error); ok { - r0 = rf(ctx, tenant, device) + if rf, ok := ret.Get(0).(func(context.Context, *models.MembershipInvitation) error); ok { + r0 = rf(ctx, invitation) } else { r0 = ret.Error(0) } @@ -651,32 +618,36 @@ func (_m *Store) DeviceRemovedInsert(ctx context.Context, tenant string, device return r0 } -// DeviceRemovedList provides a mock function with given fields: ctx, tenant, pagination, filters, sort, _a5 -func (_m *Store) DeviceRemovedList(ctx context.Context, tenant string, pagination paginator.Query, filters []models.Filter, sort string, _a5 string) ([]models.DeviceRemoved, int, error) { - ret := _m.Called(ctx, tenant, pagination, filters, sort, _a5) +// NamespaceConflicts provides a mock function with given fields: ctx, target +func (_m *Store) NamespaceConflicts(ctx context.Context, target *models.NamespaceConflicts) ([]string, bool, error) { + ret := _m.Called(ctx, target) - var r0 []models.DeviceRemoved - var r1 int + if len(ret) == 0 { + panic("no return value specified for NamespaceConflicts") + } + + var r0 []string + var r1 bool var r2 error - if rf, ok := ret.Get(0).(func(context.Context, string, paginator.Query, []models.Filter, string, string) ([]models.DeviceRemoved, int, error)); ok { - return rf(ctx, tenant, pagination, filters, sort, _a5) + if rf, ok := ret.Get(0).(func(context.Context, 
*models.NamespaceConflicts) ([]string, bool, error)); ok { + return rf(ctx, target) } - if rf, ok := ret.Get(0).(func(context.Context, string, paginator.Query, []models.Filter, string, string) []models.DeviceRemoved); ok { - r0 = rf(ctx, tenant, pagination, filters, sort, _a5) + if rf, ok := ret.Get(0).(func(context.Context, *models.NamespaceConflicts) []string); ok { + r0 = rf(ctx, target) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).([]models.DeviceRemoved) + r0 = ret.Get(0).([]string) } } - if rf, ok := ret.Get(1).(func(context.Context, string, paginator.Query, []models.Filter, string, string) int); ok { - r1 = rf(ctx, tenant, pagination, filters, sort, _a5) + if rf, ok := ret.Get(1).(func(context.Context, *models.NamespaceConflicts) bool); ok { + r1 = rf(ctx, target) } else { - r1 = ret.Get(1).(int) + r1 = ret.Get(1).(bool) } - if rf, ok := ret.Get(2).(func(context.Context, string, paginator.Query, []models.Filter, string, string) error); ok { - r2 = rf(ctx, tenant, pagination, filters, sort, _a5) + if rf, ok := ret.Get(2).(func(context.Context, *models.NamespaceConflicts) error); ok { + r2 = rf(ctx, target) } else { r2 = ret.Error(2) } @@ -684,552 +655,23 @@ func (_m *Store) DeviceRemovedList(ctx context.Context, tenant string, paginatio return r0, r1, r2 } -// DeviceRename provides a mock function with given fields: ctx, uid, hostname -func (_m *Store) DeviceRename(ctx context.Context, uid models.UID, hostname string) error { - ret := _m.Called(ctx, uid, hostname) +// NamespaceCreate provides a mock function with given fields: ctx, namespace +func (_m *Store) NamespaceCreate(ctx context.Context, namespace *models.Namespace) (string, error) { + ret := _m.Called(ctx, namespace) - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, models.UID, string) error); ok { - r0 = rf(ctx, uid, hostname) - } else { - r0 = ret.Error(0) + if len(ret) == 0 { + panic("no return value specified for NamespaceCreate") } - return r0 -} - -// DeviceSetOnline provides 
a mock function with given fields: ctx, uid, timestamp, online -func (_m *Store) DeviceSetOnline(ctx context.Context, uid models.UID, timestamp time.Time, online bool) error { - ret := _m.Called(ctx, uid, timestamp, online) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, models.UID, time.Time, bool) error); ok { - r0 = rf(ctx, uid, timestamp, online) - } else { - r0 = ret.Error(0) + var r0 string + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *models.Namespace) (string, error)); ok { + return rf(ctx, namespace) } - - return r0 -} - -// DeviceSetPosition provides a mock function with given fields: ctx, uid, position -func (_m *Store) DeviceSetPosition(ctx context.Context, uid models.UID, position models.DevicePosition) error { - ret := _m.Called(ctx, uid, position) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, models.UID, models.DevicePosition) error); ok { - r0 = rf(ctx, uid, position) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// DeviceSetTags provides a mock function with given fields: ctx, uid, tags -func (_m *Store) DeviceSetTags(ctx context.Context, uid models.UID, tags []string) (int64, int64, error) { - ret := _m.Called(ctx, uid, tags) - - var r0 int64 - var r1 int64 - var r2 error - if rf, ok := ret.Get(0).(func(context.Context, models.UID, []string) (int64, int64, error)); ok { - return rf(ctx, uid, tags) - } - if rf, ok := ret.Get(0).(func(context.Context, models.UID, []string) int64); ok { - r0 = rf(ctx, uid, tags) - } else { - r0 = ret.Get(0).(int64) - } - - if rf, ok := ret.Get(1).(func(context.Context, models.UID, []string) int64); ok { - r1 = rf(ctx, uid, tags) - } else { - r1 = ret.Get(1).(int64) - } - - if rf, ok := ret.Get(2).(func(context.Context, models.UID, []string) error); ok { - r2 = rf(ctx, uid, tags) - } else { - r2 = ret.Error(2) - } - - return r0, r1, r2 -} - -// DeviceUpdate provides a mock function with given fields: ctx, tenant, uid, name, publicURL -func (_m *Store) 
DeviceUpdate(ctx context.Context, tenant string, uid models.UID, name *string, publicURL *bool) error { - ret := _m.Called(ctx, tenant, uid, name, publicURL) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, models.UID, *string, *bool) error); ok { - r0 = rf(ctx, tenant, uid, name, publicURL) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// DeviceUpdateLastSeen provides a mock function with given fields: ctx, uid, ts -func (_m *Store) DeviceUpdateLastSeen(ctx context.Context, uid models.UID, ts time.Time) error { - ret := _m.Called(ctx, uid, ts) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, models.UID, time.Time) error); ok { - r0 = rf(ctx, uid, ts) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// DeviceUpdateOnline provides a mock function with given fields: ctx, uid, online -func (_m *Store) DeviceUpdateOnline(ctx context.Context, uid models.UID, online bool) error { - ret := _m.Called(ctx, uid, online) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, models.UID, bool) error); ok { - r0 = rf(ctx, uid, online) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// DeviceUpdateStatus provides a mock function with given fields: ctx, uid, status -func (_m *Store) DeviceUpdateStatus(ctx context.Context, uid models.UID, status models.DeviceStatus) error { - ret := _m.Called(ctx, uid, status) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, models.UID, models.DeviceStatus) error); ok { - r0 = rf(ctx, uid, status) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// FirewallRuleBulkDeleteTag provides a mock function with given fields: ctx, tenant, tag -func (_m *Store) FirewallRuleBulkDeleteTag(ctx context.Context, tenant string, tag string) (int64, error) { - ret := _m.Called(ctx, tenant, tag) - - var r0 int64 - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, string) (int64, error)); ok { - return rf(ctx, tenant, tag) - } - if rf, ok := 
ret.Get(0).(func(context.Context, string, string) int64); ok { - r0 = rf(ctx, tenant, tag) - } else { - r0 = ret.Get(0).(int64) - } - - if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok { - r1 = rf(ctx, tenant, tag) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// FirewallRuleBulkRenameTag provides a mock function with given fields: ctx, tenant, currentTag, newTag -func (_m *Store) FirewallRuleBulkRenameTag(ctx context.Context, tenant string, currentTag string, newTag string) (int64, error) { - ret := _m.Called(ctx, tenant, currentTag, newTag) - - var r0 int64 - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, string, string) (int64, error)); ok { - return rf(ctx, tenant, currentTag, newTag) - } - if rf, ok := ret.Get(0).(func(context.Context, string, string, string) int64); ok { - r0 = rf(ctx, tenant, currentTag, newTag) - } else { - r0 = ret.Get(0).(int64) - } - - if rf, ok := ret.Get(1).(func(context.Context, string, string, string) error); ok { - r1 = rf(ctx, tenant, currentTag, newTag) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// FirewallRuleCreate provides a mock function with given fields: ctx, rule -func (_m *Store) FirewallRuleCreate(ctx context.Context, rule *models.FirewallRule) error { - ret := _m.Called(ctx, rule) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.FirewallRule) error); ok { - r0 = rf(ctx, rule) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// FirewallRuleDelete provides a mock function with given fields: ctx, id -func (_m *Store) FirewallRuleDelete(ctx context.Context, id string) error { - ret := _m.Called(ctx, id) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { - r0 = rf(ctx, id) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// FirewallRuleGet provides a mock function with given fields: ctx, id -func (_m *Store) FirewallRuleGet(ctx context.Context, id string) (*models.FirewallRule, 
error) { - ret := _m.Called(ctx, id) - - var r0 *models.FirewallRule - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) (*models.FirewallRule, error)); ok { - return rf(ctx, id) - } - if rf, ok := ret.Get(0).(func(context.Context, string) *models.FirewallRule); ok { - r0 = rf(ctx, id) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.FirewallRule) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, id) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// FirewallRuleGetTags provides a mock function with given fields: ctx, tenant -func (_m *Store) FirewallRuleGetTags(ctx context.Context, tenant string) ([]string, int, error) { - ret := _m.Called(ctx, tenant) - - var r0 []string - var r1 int - var r2 error - if rf, ok := ret.Get(0).(func(context.Context, string) ([]string, int, error)); ok { - return rf(ctx, tenant) - } - if rf, ok := ret.Get(0).(func(context.Context, string) []string); ok { - r0 = rf(ctx, tenant) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]string) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, string) int); ok { - r1 = rf(ctx, tenant) - } else { - r1 = ret.Get(1).(int) - } - - if rf, ok := ret.Get(2).(func(context.Context, string) error); ok { - r2 = rf(ctx, tenant) - } else { - r2 = ret.Error(2) - } - - return r0, r1, r2 -} - -// FirewallRuleList provides a mock function with given fields: ctx, pagination -func (_m *Store) FirewallRuleList(ctx context.Context, pagination paginator.Query) ([]models.FirewallRule, int, error) { - ret := _m.Called(ctx, pagination) - - var r0 []models.FirewallRule - var r1 int - var r2 error - if rf, ok := ret.Get(0).(func(context.Context, paginator.Query) ([]models.FirewallRule, int, error)); ok { - return rf(ctx, pagination) - } - if rf, ok := ret.Get(0).(func(context.Context, paginator.Query) []models.FirewallRule); ok { - r0 = rf(ctx, pagination) - } else { - if ret.Get(0) != nil { - r0 = 
ret.Get(0).([]models.FirewallRule) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, paginator.Query) int); ok { - r1 = rf(ctx, pagination) - } else { - r1 = ret.Get(1).(int) - } - - if rf, ok := ret.Get(2).(func(context.Context, paginator.Query) error); ok { - r2 = rf(ctx, pagination) - } else { - r2 = ret.Error(2) - } - - return r0, r1, r2 -} - -// FirewallRulePullTag provides a mock function with given fields: ctx, id, tag -func (_m *Store) FirewallRulePullTag(ctx context.Context, id string, tag string) error { - ret := _m.Called(ctx, id, tag) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, string) error); ok { - r0 = rf(ctx, id, tag) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// FirewallRulePushTag provides a mock function with given fields: ctx, id, tag -func (_m *Store) FirewallRulePushTag(ctx context.Context, id string, tag string) error { - ret := _m.Called(ctx, id, tag) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, string) error); ok { - r0 = rf(ctx, id, tag) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// FirewallRuleSetTags provides a mock function with given fields: ctx, id, tags -func (_m *Store) FirewallRuleSetTags(ctx context.Context, id string, tags []string) error { - ret := _m.Called(ctx, id, tags) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, []string) error); ok { - r0 = rf(ctx, id, tags) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// FirewallRuleUpdate provides a mock function with given fields: ctx, id, rule -func (_m *Store) FirewallRuleUpdate(ctx context.Context, id string, rule models.FirewallRuleUpdate) (*models.FirewallRule, error) { - ret := _m.Called(ctx, id, rule) - - var r0 *models.FirewallRule - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, models.FirewallRuleUpdate) (*models.FirewallRule, error)); ok { - return rf(ctx, id, rule) - } - if rf, ok := ret.Get(0).(func(context.Context, 
string, models.FirewallRuleUpdate) *models.FirewallRule); ok { - r0 = rf(ctx, id, rule) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.FirewallRule) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, string, models.FirewallRuleUpdate) error); ok { - r1 = rf(ctx, id, rule) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// GetCodes provides a mock function with given fields: ctx, id -func (_m *Store) GetCodes(ctx context.Context, id string) ([]string, error) { - ret := _m.Called(ctx, id) - - var r0 []string - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) ([]string, error)); ok { - return rf(ctx, id) - } - if rf, ok := ret.Get(0).(func(context.Context, string) []string); ok { - r0 = rf(ctx, id) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]string) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, id) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// GetSecret provides a mock function with given fields: ctx, id -func (_m *Store) GetSecret(ctx context.Context, id string) (string, error) { - ret := _m.Called(ctx, id) - - var r0 string - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) (string, error)); ok { - return rf(ctx, id) - } - if rf, ok := ret.Get(0).(func(context.Context, string) string); ok { - r0 = rf(ctx, id) - } else { - r0 = ret.Get(0).(string) - } - - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, id) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// GetStats provides a mock function with given fields: ctx -func (_m *Store) GetStats(ctx context.Context) (*models.Stats, error) { - ret := _m.Called(ctx) - - var r0 *models.Stats - var r1 error - if rf, ok := ret.Get(0).(func(context.Context) (*models.Stats, error)); ok { - return rf(ctx) - } - if rf, ok := ret.Get(0).(func(context.Context) *models.Stats); ok { - r0 = rf(ctx) - } else { - if ret.Get(0) != nil { - r0 = 
ret.Get(0).(*models.Stats) - } - } - - if rf, ok := ret.Get(1).(func(context.Context) error); ok { - r1 = rf(ctx) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// GetStatusMFA provides a mock function with given fields: ctx, id -func (_m *Store) GetStatusMFA(ctx context.Context, id string) (bool, error) { - ret := _m.Called(ctx, id) - - var r0 bool - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) (bool, error)); ok { - return rf(ctx, id) - } - if rf, ok := ret.Get(0).(func(context.Context, string) bool); ok { - r0 = rf(ctx, id) - } else { - r0 = ret.Get(0).(bool) - } - - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, id) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// LicenseLoad provides a mock function with given fields: ctx -func (_m *Store) LicenseLoad(ctx context.Context) (*models.License, error) { - ret := _m.Called(ctx) - - var r0 *models.License - var r1 error - if rf, ok := ret.Get(0).(func(context.Context) (*models.License, error)); ok { - return rf(ctx) - } - if rf, ok := ret.Get(0).(func(context.Context) *models.License); ok { - r0 = rf(ctx) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.License) - } - } - - if rf, ok := ret.Get(1).(func(context.Context) error); ok { - r1 = rf(ctx) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// LicenseSave provides a mock function with given fields: ctx, license -func (_m *Store) LicenseSave(ctx context.Context, license *models.License) error { - ret := _m.Called(ctx, license) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.License) error); ok { - r0 = rf(ctx, license) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// NamespaceAddMember provides a mock function with given fields: ctx, tenantID, memberID, memberRole -func (_m *Store) NamespaceAddMember(ctx context.Context, tenantID string, memberID string, memberRole string) (*models.Namespace, error) { - ret := 
_m.Called(ctx, tenantID, memberID, memberRole) - - var r0 *models.Namespace - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, string, string) (*models.Namespace, error)); ok { - return rf(ctx, tenantID, memberID, memberRole) - } - if rf, ok := ret.Get(0).(func(context.Context, string, string, string) *models.Namespace); ok { - r0 = rf(ctx, tenantID, memberID, memberRole) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.Namespace) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, string, string, string) error); ok { - r1 = rf(ctx, tenantID, memberID, memberRole) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// NamespaceCreate provides a mock function with given fields: ctx, namespace -func (_m *Store) NamespaceCreate(ctx context.Context, namespace *models.Namespace) (*models.Namespace, error) { - ret := _m.Called(ctx, namespace) - - var r0 *models.Namespace - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Namespace) (*models.Namespace, error)); ok { - return rf(ctx, namespace) - } - if rf, ok := ret.Get(0).(func(context.Context, *models.Namespace) *models.Namespace); ok { + if rf, ok := ret.Get(0).(func(context.Context, *models.Namespace) string); ok { r0 = rf(ctx, namespace) } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.Namespace) - } + r0 = ret.Get(0).(string) } if rf, ok := ret.Get(1).(func(context.Context, *models.Namespace) error); ok { @@ -1241,13 +683,17 @@ func (_m *Store) NamespaceCreate(ctx context.Context, namespace *models.Namespac return r0, r1 } -// NamespaceDelete provides a mock function with given fields: ctx, tenantID -func (_m *Store) NamespaceDelete(ctx context.Context, tenantID string) error { - ret := _m.Called(ctx, tenantID) +// NamespaceCreateMembership provides a mock function with given fields: ctx, tenantID, member +func (_m *Store) NamespaceCreateMembership(ctx context.Context, tenantID string, member *models.Member) error { + ret := 
_m.Called(ctx, tenantID, member) + + if len(ret) == 0 { + panic("no return value specified for NamespaceCreateMembership") + } var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { - r0 = rf(ctx, tenantID) + if rf, ok := ret.Get(0).(func(context.Context, string, *models.Member) error); ok { + r0 = rf(ctx, tenantID, member) } else { r0 = ret.Error(0) } @@ -1255,13 +701,17 @@ func (_m *Store) NamespaceDelete(ctx context.Context, tenantID string) error { return r0 } -// NamespaceEditMember provides a mock function with given fields: ctx, tenantID, memberID, memberNewRole -func (_m *Store) NamespaceEditMember(ctx context.Context, tenantID string, memberID string, memberNewRole string) error { - ret := _m.Called(ctx, tenantID, memberID, memberNewRole) +// NamespaceDelete provides a mock function with given fields: ctx, namespace +func (_m *Store) NamespaceDelete(ctx context.Context, namespace *models.Namespace) error { + ret := _m.Called(ctx, namespace) + + if len(ret) == 0 { + panic("no return value specified for NamespaceDelete") + } var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, string, string) error); ok { - r0 = rf(ctx, tenantID, memberID, memberNewRole) + if rf, ok := ret.Get(0).(func(context.Context, *models.Namespace) error); ok { + r0 = rf(ctx, namespace) } else { r0 = ret.Error(0) } @@ -1269,25 +719,27 @@ func (_m *Store) NamespaceEditMember(ctx context.Context, tenantID string, membe return r0 } -// NamespaceGet provides a mock function with given fields: ctx, tenantID -func (_m *Store) NamespaceGet(ctx context.Context, tenantID string) (*models.Namespace, error) { - ret := _m.Called(ctx, tenantID) +// NamespaceDeleteMany provides a mock function with given fields: ctx, tenantIDs +func (_m *Store) NamespaceDeleteMany(ctx context.Context, tenantIDs []string) (int64, error) { + ret := _m.Called(ctx, tenantIDs) - var r0 *models.Namespace + if len(ret) == 0 { + panic("no return value specified for 
NamespaceDeleteMany") + } + + var r0 int64 var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) (*models.Namespace, error)); ok { - return rf(ctx, tenantID) + if rf, ok := ret.Get(0).(func(context.Context, []string) (int64, error)); ok { + return rf(ctx, tenantIDs) } - if rf, ok := ret.Get(0).(func(context.Context, string) *models.Namespace); ok { - r0 = rf(ctx, tenantID) + if rf, ok := ret.Get(0).(func(context.Context, []string) int64); ok { + r0 = rf(ctx, tenantIDs) } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.Namespace) - } + r0 = ret.Get(0).(int64) } - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, tenantID) + if rf, ok := ret.Get(1).(func(context.Context, []string) error); ok { + r1 = rf(ctx, tenantIDs) } else { r1 = ret.Error(1) } @@ -1295,43 +747,39 @@ func (_m *Store) NamespaceGet(ctx context.Context, tenantID string) (*models.Nam return r0, r1 } -// NamespaceGetByName provides a mock function with given fields: ctx, name -func (_m *Store) NamespaceGetByName(ctx context.Context, name string) (*models.Namespace, error) { - ret := _m.Called(ctx, name) +// NamespaceDeleteMembership provides a mock function with given fields: ctx, tenantID, member +func (_m *Store) NamespaceDeleteMembership(ctx context.Context, tenantID string, member *models.Member) error { + ret := _m.Called(ctx, tenantID, member) - var r0 *models.Namespace - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) (*models.Namespace, error)); ok { - return rf(ctx, name) - } - if rf, ok := ret.Get(0).(func(context.Context, string) *models.Namespace); ok { - r0 = rf(ctx, name) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.Namespace) - } + if len(ret) == 0 { + panic("no return value specified for NamespaceDeleteMembership") } - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, name) + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, string, 
*models.Member) error); ok { + r0 = rf(ctx, tenantID, member) } else { - r1 = ret.Error(1) + r0 = ret.Error(0) } - return r0, r1 + return r0 } -// NamespaceGetFirst provides a mock function with given fields: ctx, id -func (_m *Store) NamespaceGetFirst(ctx context.Context, id string) (*models.Namespace, error) { - ret := _m.Called(ctx, id) +// NamespaceGetPreferred provides a mock function with given fields: ctx, userID +func (_m *Store) NamespaceGetPreferred(ctx context.Context, userID string) (*models.Namespace, error) { + ret := _m.Called(ctx, userID) + + if len(ret) == 0 { + panic("no return value specified for NamespaceGetPreferred") + } var r0 *models.Namespace var r1 error if rf, ok := ret.Get(0).(func(context.Context, string) (*models.Namespace, error)); ok { - return rf(ctx, id) + return rf(ctx, userID) } if rf, ok := ret.Get(0).(func(context.Context, string) *models.Namespace); ok { - r0 = rf(ctx, id) + r0 = rf(ctx, userID) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*models.Namespace) @@ -1339,7 +787,7 @@ func (_m *Store) NamespaceGetFirst(ctx context.Context, id string) (*models.Name } if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, id) + r1 = rf(ctx, userID) } else { r1 = ret.Error(1) } @@ -1347,56 +795,61 @@ func (_m *Store) NamespaceGetFirst(ctx context.Context, id string) (*models.Name return r0, r1 } -// NamespaceGetSessionRecord provides a mock function with given fields: ctx, tenantID -func (_m *Store) NamespaceGetSessionRecord(ctx context.Context, tenantID string) (bool, error) { - ret := _m.Called(ctx, tenantID) +// NamespaceIncrementDeviceCount provides a mock function with given fields: ctx, tenantID, status, count +func (_m *Store) NamespaceIncrementDeviceCount(ctx context.Context, tenantID string, status models.DeviceStatus, count int64) error { + ret := _m.Called(ctx, tenantID, status, count) - var r0 bool - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) (bool, error)); ok { - 
return rf(ctx, tenantID) - } - if rf, ok := ret.Get(0).(func(context.Context, string) bool); ok { - r0 = rf(ctx, tenantID) - } else { - r0 = ret.Get(0).(bool) + if len(ret) == 0 { + panic("no return value specified for NamespaceIncrementDeviceCount") } - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, tenantID) + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, string, models.DeviceStatus, int64) error); ok { + r0 = rf(ctx, tenantID, status, count) } else { - r1 = ret.Error(1) + r0 = ret.Error(0) } - return r0, r1 + return r0 } -// NamespaceList provides a mock function with given fields: ctx, pagination, filters, export -func (_m *Store) NamespaceList(ctx context.Context, pagination paginator.Query, filters []models.Filter, export bool) ([]models.Namespace, int, error) { - ret := _m.Called(ctx, pagination, filters, export) +// NamespaceList provides a mock function with given fields: ctx, opts +func (_m *Store) NamespaceList(ctx context.Context, opts ...store.QueryOption) ([]models.Namespace, int, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for NamespaceList") + } var r0 []models.Namespace var r1 int var r2 error - if rf, ok := ret.Get(0).(func(context.Context, paginator.Query, []models.Filter, bool) ([]models.Namespace, int, error)); ok { - return rf(ctx, pagination, filters, export) + if rf, ok := ret.Get(0).(func(context.Context, ...store.QueryOption) ([]models.Namespace, int, error)); ok { + return rf(ctx, opts...) } - if rf, ok := ret.Get(0).(func(context.Context, paginator.Query, []models.Filter, bool) []models.Namespace); ok { - r0 = rf(ctx, pagination, filters, export) + if rf, ok := ret.Get(0).(func(context.Context, ...store.QueryOption) []models.Namespace); ok { + r0 = rf(ctx, opts...) 
} else { if ret.Get(0) != nil { r0 = ret.Get(0).([]models.Namespace) } } - if rf, ok := ret.Get(1).(func(context.Context, paginator.Query, []models.Filter, bool) int); ok { - r1 = rf(ctx, pagination, filters, export) + if rf, ok := ret.Get(1).(func(context.Context, ...store.QueryOption) int); ok { + r1 = rf(ctx, opts...) } else { r1 = ret.Get(1).(int) } - if rf, ok := ret.Get(2).(func(context.Context, paginator.Query, []models.Filter, bool) error); ok { - r2 = rf(ctx, pagination, filters, export) + if rf, ok := ret.Get(2).(func(context.Context, ...store.QueryOption) error); ok { + r2 = rf(ctx, opts...) } else { r2 = ret.Error(2) } @@ -1404,25 +857,29 @@ func (_m *Store) NamespaceList(ctx context.Context, pagination paginator.Query, return r0, r1, r2 } -// NamespaceRemoveMember provides a mock function with given fields: ctx, tenantID, memberID -func (_m *Store) NamespaceRemoveMember(ctx context.Context, tenantID string, memberID string) (*models.Namespace, error) { - ret := _m.Called(ctx, tenantID, memberID) +// NamespaceResolve provides a mock function with given fields: ctx, resolver, value +func (_m *Store) NamespaceResolve(ctx context.Context, resolver store.NamespaceResolver, value string) (*models.Namespace, error) { + ret := _m.Called(ctx, resolver, value) + + if len(ret) == 0 { + panic("no return value specified for NamespaceResolve") + } var r0 *models.Namespace var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, string) (*models.Namespace, error)); ok { - return rf(ctx, tenantID, memberID) + if rf, ok := ret.Get(0).(func(context.Context, store.NamespaceResolver, string) (*models.Namespace, error)); ok { + return rf(ctx, resolver, value) } - if rf, ok := ret.Get(0).(func(context.Context, string, string) *models.Namespace); ok { - r0 = rf(ctx, tenantID, memberID) + if rf, ok := ret.Get(0).(func(context.Context, store.NamespaceResolver, string) *models.Namespace); ok { + r0 = rf(ctx, resolver, value) } else { if ret.Get(0) != nil { r0 = 
ret.Get(0).(*models.Namespace) } } - if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok { - r1 = rf(ctx, tenantID, memberID) + if rf, ok := ret.Get(1).(func(context.Context, store.NamespaceResolver, string) error); ok { + r1 = rf(ctx, resolver, value) } else { r1 = ret.Error(1) } @@ -1430,39 +887,35 @@ func (_m *Store) NamespaceRemoveMember(ctx context.Context, tenantID string, mem return r0, r1 } -// NamespaceRename provides a mock function with given fields: ctx, tenantID, name -func (_m *Store) NamespaceRename(ctx context.Context, tenantID string, name string) (*models.Namespace, error) { - ret := _m.Called(ctx, tenantID, name) +// NamespaceUpdate provides a mock function with given fields: ctx, namespace +func (_m *Store) NamespaceUpdate(ctx context.Context, namespace *models.Namespace) error { + ret := _m.Called(ctx, namespace) - var r0 *models.Namespace - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, string) (*models.Namespace, error)); ok { - return rf(ctx, tenantID, name) - } - if rf, ok := ret.Get(0).(func(context.Context, string, string) *models.Namespace); ok { - r0 = rf(ctx, tenantID, name) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.Namespace) - } + if len(ret) == 0 { + panic("no return value specified for NamespaceUpdate") } - if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok { - r1 = rf(ctx, tenantID, name) + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *models.Namespace) error); ok { + r0 = rf(ctx, namespace) } else { - r1 = ret.Error(1) + r0 = ret.Error(0) } - return r0, r1 + return r0 } -// NamespaceSetSessionRecord provides a mock function with given fields: ctx, sessionRecord, tenantID -func (_m *Store) NamespaceSetSessionRecord(ctx context.Context, sessionRecord bool, tenantID string) error { - ret := _m.Called(ctx, sessionRecord, tenantID) +// NamespaceUpdateMembership provides a mock function with given fields: ctx, tenantID, member 
+func (_m *Store) NamespaceUpdateMembership(ctx context.Context, tenantID string, member *models.Member) error { + ret := _m.Called(ctx, tenantID, member) + + if len(ret) == 0 { + panic("no return value specified for NamespaceUpdateMembership") + } var r0 error - if rf, ok := ret.Get(0).(func(context.Context, bool, string) error); ok { - r0 = rf(ctx, sessionRecord, tenantID) + if rf, ok := ret.Get(0).(func(context.Context, string, *models.Member) error); ok { + r0 = rf(ctx, tenantID, member) } else { r0 = ret.Error(0) } @@ -1470,15 +923,21 @@ func (_m *Store) NamespaceSetSessionRecord(ctx context.Context, sessionRecord bo return r0 } -// NamespaceUpdate provides a mock function with given fields: ctx, tenantID, namespace -func (_m *Store) NamespaceUpdate(ctx context.Context, tenantID string, namespace *models.Namespace) error { - ret := _m.Called(ctx, tenantID, namespace) +// Options provides a mock function with no fields +func (_m *Store) Options() store.QueryOptions { + ret := _m.Called() - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, *models.Namespace) error); ok { - r0 = rf(ctx, tenantID, namespace) + if len(ret) == 0 { + panic("no return value specified for Options") + } + + var r0 store.QueryOptions + if rf, ok := ret.Get(0).(func() store.QueryOptions); ok { + r0 = rf() } else { - r0 = ret.Error(0) + if ret.Get(0) != nil { + r0 = ret.Get(0).(store.QueryOptions) + } } return r0 @@ -1488,6 +947,10 @@ func (_m *Store) NamespaceUpdate(ctx context.Context, tenantID string, namespace func (_m *Store) PrivateKeyCreate(ctx context.Context, key *models.PrivateKey) error { ret := _m.Called(ctx, key) + if len(ret) == 0 { + panic("no return value specified for PrivateKeyCreate") + } + var r0 error if rf, ok := ret.Get(0).(func(context.Context, *models.PrivateKey) error); ok { r0 = rf(ctx, key) @@ -1502,6 +965,10 @@ func (_m *Store) PrivateKeyCreate(ctx context.Context, key *models.PrivateKey) e func (_m *Store) PrivateKeyGet(ctx context.Context, 
fingerprint string) (*models.PrivateKey, error) { ret := _m.Called(ctx, fingerprint) + if len(ret) == 0 { + panic("no return value specified for PrivateKeyGet") + } + var r0 *models.PrivateKey var r1 error if rf, ok := ret.Get(0).(func(context.Context, string) (*models.PrivateKey, error)); ok { @@ -1524,47 +991,27 @@ func (_m *Store) PrivateKeyGet(ctx context.Context, fingerprint string) (*models return r0, r1 } -// PublicKeyBulkDeleteTag provides a mock function with given fields: ctx, tenant, tag -func (_m *Store) PublicKeyBulkDeleteTag(ctx context.Context, tenant string, tag string) (int64, error) { - ret := _m.Called(ctx, tenant, tag) - - var r0 int64 - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, string) (int64, error)); ok { - return rf(ctx, tenant, tag) - } - if rf, ok := ret.Get(0).(func(context.Context, string, string) int64); ok { - r0 = rf(ctx, tenant, tag) - } else { - r0 = ret.Get(0).(int64) - } +// PublicKeyCreate provides a mock function with given fields: ctx, key +func (_m *Store) PublicKeyCreate(ctx context.Context, key *models.PublicKey) (string, error) { + ret := _m.Called(ctx, key) - if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok { - r1 = rf(ctx, tenant, tag) - } else { - r1 = ret.Error(1) + if len(ret) == 0 { + panic("no return value specified for PublicKeyCreate") } - return r0, r1 -} - -// PublicKeyBulkRenameTag provides a mock function with given fields: ctx, tenant, currentTag, newTag -func (_m *Store) PublicKeyBulkRenameTag(ctx context.Context, tenant string, currentTag string, newTag string) (int64, error) { - ret := _m.Called(ctx, tenant, currentTag, newTag) - - var r0 int64 + var r0 string var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, string, string) (int64, error)); ok { - return rf(ctx, tenant, currentTag, newTag) + if rf, ok := ret.Get(0).(func(context.Context, *models.PublicKey) (string, error)); ok { + return rf(ctx, key) } - if rf, ok := 
ret.Get(0).(func(context.Context, string, string, string) int64); ok { - r0 = rf(ctx, tenant, currentTag, newTag) + if rf, ok := ret.Get(0).(func(context.Context, *models.PublicKey) string); ok { + r0 = rf(ctx, key) } else { - r0 = ret.Get(0).(int64) + r0 = ret.Get(0).(string) } - if rf, ok := ret.Get(1).(func(context.Context, string, string, string) error); ok { - r1 = rf(ctx, tenant, currentTag, newTag) + if rf, ok := ret.Get(1).(func(context.Context, *models.PublicKey) error); ok { + r1 = rf(ctx, key) } else { r1 = ret.Error(1) } @@ -1572,27 +1019,17 @@ func (_m *Store) PublicKeyBulkRenameTag(ctx context.Context, tenant string, curr return r0, r1 } -// PublicKeyCreate provides a mock function with given fields: ctx, key -func (_m *Store) PublicKeyCreate(ctx context.Context, key *models.PublicKey) error { - ret := _m.Called(ctx, key) +// PublicKeyDelete provides a mock function with given fields: ctx, publicKey +func (_m *Store) PublicKeyDelete(ctx context.Context, publicKey *models.PublicKey) error { + ret := _m.Called(ctx, publicKey) - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.PublicKey) error); ok { - r0 = rf(ctx, key) - } else { - r0 = ret.Error(0) + if len(ret) == 0 { + panic("no return value specified for PublicKeyDelete") } - return r0 -} - -// PublicKeyDelete provides a mock function with given fields: ctx, fingerprint, tenantID -func (_m *Store) PublicKeyDelete(ctx context.Context, fingerprint string, tenantID string) error { - ret := _m.Called(ctx, fingerprint, tenantID) - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, string) error); ok { - r0 = rf(ctx, fingerprint, tenantID) + if rf, ok := ret.Get(0).(func(context.Context, *models.PublicKey) error); ok { + r0 = rf(ctx, publicKey) } else { r0 = ret.Error(0) } @@ -1600,91 +1037,43 @@ func (_m *Store) PublicKeyDelete(ctx context.Context, fingerprint string, tenant return r0 } -// PublicKeyGet provides a mock function with given fields: ctx, fingerprint, 
tenantID -func (_m *Store) PublicKeyGet(ctx context.Context, fingerprint string, tenantID string) (*models.PublicKey, error) { - ret := _m.Called(ctx, fingerprint, tenantID) - - var r0 *models.PublicKey - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, string) (*models.PublicKey, error)); ok { - return rf(ctx, fingerprint, tenantID) - } - if rf, ok := ret.Get(0).(func(context.Context, string, string) *models.PublicKey); ok { - r0 = rf(ctx, fingerprint, tenantID) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.PublicKey) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok { - r1 = rf(ctx, fingerprint, tenantID) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// PublicKeyGetTags provides a mock function with given fields: ctx, tenant -func (_m *Store) PublicKeyGetTags(ctx context.Context, tenant string) ([]string, int, error) { - ret := _m.Called(ctx, tenant) - - var r0 []string - var r1 int - var r2 error - if rf, ok := ret.Get(0).(func(context.Context, string) ([]string, int, error)); ok { - return rf(ctx, tenant) - } - if rf, ok := ret.Get(0).(func(context.Context, string) []string); ok { - r0 = rf(ctx, tenant) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]string) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, string) int); ok { - r1 = rf(ctx, tenant) - } else { - r1 = ret.Get(1).(int) +// PublicKeyList provides a mock function with given fields: ctx, opts +func (_m *Store) PublicKeyList(ctx context.Context, opts ...store.QueryOption) ([]models.PublicKey, int, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] } + var _ca []interface{} + _ca = append(_ca, ctx) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) 
- if rf, ok := ret.Get(2).(func(context.Context, string) error); ok { - r2 = rf(ctx, tenant) - } else { - r2 = ret.Error(2) + if len(ret) == 0 { + panic("no return value specified for PublicKeyList") } - return r0, r1, r2 -} - -// PublicKeyList provides a mock function with given fields: ctx, pagination -func (_m *Store) PublicKeyList(ctx context.Context, pagination paginator.Query) ([]models.PublicKey, int, error) { - ret := _m.Called(ctx, pagination) - var r0 []models.PublicKey var r1 int var r2 error - if rf, ok := ret.Get(0).(func(context.Context, paginator.Query) ([]models.PublicKey, int, error)); ok { - return rf(ctx, pagination) + if rf, ok := ret.Get(0).(func(context.Context, ...store.QueryOption) ([]models.PublicKey, int, error)); ok { + return rf(ctx, opts...) } - if rf, ok := ret.Get(0).(func(context.Context, paginator.Query) []models.PublicKey); ok { - r0 = rf(ctx, pagination) + if rf, ok := ret.Get(0).(func(context.Context, ...store.QueryOption) []models.PublicKey); ok { + r0 = rf(ctx, opts...) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]models.PublicKey) } } - if rf, ok := ret.Get(1).(func(context.Context, paginator.Query) int); ok { - r1 = rf(ctx, pagination) + if rf, ok := ret.Get(1).(func(context.Context, ...store.QueryOption) int); ok { + r1 = rf(ctx, opts...) } else { r1 = ret.Get(1).(int) } - if rf, ok := ret.Get(2).(func(context.Context, paginator.Query) error); ok { - r2 = rf(ctx, pagination) + if rf, ok := ret.Get(2).(func(context.Context, ...store.QueryOption) error); ok { + r2 = rf(ctx, opts...) 
} else { r2 = ret.Error(2) } @@ -1692,84 +1081,36 @@ func (_m *Store) PublicKeyList(ctx context.Context, pagination paginator.Query) return r0, r1, r2 } -// PublicKeyPullTag provides a mock function with given fields: ctx, tenant, fingerprint, tag -func (_m *Store) PublicKeyPullTag(ctx context.Context, tenant string, fingerprint string, tag string) error { - ret := _m.Called(ctx, tenant, fingerprint, tag) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, string, string) error); ok { - r0 = rf(ctx, tenant, fingerprint, tag) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// PublicKeyPushTag provides a mock function with given fields: ctx, tenant, fingerprint, tag -func (_m *Store) PublicKeyPushTag(ctx context.Context, tenant string, fingerprint string, tag string) error { - ret := _m.Called(ctx, tenant, fingerprint, tag) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, string, string) error); ok { - r0 = rf(ctx, tenant, fingerprint, tag) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// PublicKeySetTags provides a mock function with given fields: ctx, tenant, fingerprint, tags -func (_m *Store) PublicKeySetTags(ctx context.Context, tenant string, fingerprint string, tags []string) (int64, int64, error) { - ret := _m.Called(ctx, tenant, fingerprint, tags) - - var r0 int64 - var r1 int64 - var r2 error - if rf, ok := ret.Get(0).(func(context.Context, string, string, []string) (int64, int64, error)); ok { - return rf(ctx, tenant, fingerprint, tags) - } - if rf, ok := ret.Get(0).(func(context.Context, string, string, []string) int64); ok { - r0 = rf(ctx, tenant, fingerprint, tags) - } else { - r0 = ret.Get(0).(int64) - } - - if rf, ok := ret.Get(1).(func(context.Context, string, string, []string) int64); ok { - r1 = rf(ctx, tenant, fingerprint, tags) - } else { - r1 = ret.Get(1).(int64) +// PublicKeyResolve provides a mock function with given fields: ctx, resolver, value, opts +func (_m *Store) 
PublicKeyResolve(ctx context.Context, resolver store.PublicKeyResolver, value string, opts ...store.QueryOption) (*models.PublicKey, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] } + var _ca []interface{} + _ca = append(_ca, ctx, resolver, value) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) - if rf, ok := ret.Get(2).(func(context.Context, string, string, []string) error); ok { - r2 = rf(ctx, tenant, fingerprint, tags) - } else { - r2 = ret.Error(2) + if len(ret) == 0 { + panic("no return value specified for PublicKeyResolve") } - return r0, r1, r2 -} - -// PublicKeyUpdate provides a mock function with given fields: ctx, fingerprint, tenantID, key -func (_m *Store) PublicKeyUpdate(ctx context.Context, fingerprint string, tenantID string, key *models.PublicKeyUpdate) (*models.PublicKey, error) { - ret := _m.Called(ctx, fingerprint, tenantID, key) - var r0 *models.PublicKey var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, string, *models.PublicKeyUpdate) (*models.PublicKey, error)); ok { - return rf(ctx, fingerprint, tenantID, key) + if rf, ok := ret.Get(0).(func(context.Context, store.PublicKeyResolver, string, ...store.QueryOption) (*models.PublicKey, error)); ok { + return rf(ctx, resolver, value, opts...) } - if rf, ok := ret.Get(0).(func(context.Context, string, string, *models.PublicKeyUpdate) *models.PublicKey); ok { - r0 = rf(ctx, fingerprint, tenantID, key) + if rf, ok := ret.Get(0).(func(context.Context, store.PublicKeyResolver, string, ...store.QueryOption) *models.PublicKey); ok { + r0 = rf(ctx, resolver, value, opts...) 
} else { if ret.Get(0) != nil { r0 = ret.Get(0).(*models.PublicKey) } } - if rf, ok := ret.Get(1).(func(context.Context, string, string, *models.PublicKeyUpdate) error); ok { - r1 = rf(ctx, fingerprint, tenantID, key) + if rf, ok := ret.Get(1).(func(context.Context, store.PublicKeyResolver, string, ...store.QueryOption) error); ok { + r1 = rf(ctx, resolver, value, opts...) } else { r1 = ret.Error(1) } @@ -1777,39 +1118,17 @@ func (_m *Store) PublicKeyUpdate(ctx context.Context, fingerprint string, tenant return r0, r1 } -// SessionCreate provides a mock function with given fields: ctx, session -func (_m *Store) SessionCreate(ctx context.Context, session models.Session) (*models.Session, error) { - ret := _m.Called(ctx, session) - - var r0 *models.Session - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, models.Session) (*models.Session, error)); ok { - return rf(ctx, session) - } - if rf, ok := ret.Get(0).(func(context.Context, models.Session) *models.Session); ok { - r0 = rf(ctx, session) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.Session) - } - } +// PublicKeyUpdate provides a mock function with given fields: ctx, publicKey +func (_m *Store) PublicKeyUpdate(ctx context.Context, publicKey *models.PublicKey) error { + ret := _m.Called(ctx, publicKey) - if rf, ok := ret.Get(1).(func(context.Context, models.Session) error); ok { - r1 = rf(ctx, session) - } else { - r1 = ret.Error(1) + if len(ret) == 0 { + panic("no return value specified for PublicKeyUpdate") } - return r0, r1 -} - -// SessionCreateRecordFrame provides a mock function with given fields: ctx, uid, recordSession -func (_m *Store) SessionCreateRecordFrame(ctx context.Context, uid models.UID, recordSession *models.RecordedSession) error { - ret := _m.Called(ctx, uid, recordSession) - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, models.UID, *models.RecordedSession) error); ok { - r0 = rf(ctx, uid, recordSession) + if rf, ok := 
ret.Get(0).(func(context.Context, *models.PublicKey) error); ok { + r0 = rf(ctx, publicKey) } else { r0 = ret.Error(0) } @@ -1817,117 +1136,107 @@ func (_m *Store) SessionCreateRecordFrame(ctx context.Context, uid models.UID, r return r0 } -// SessionDeleteActives provides a mock function with given fields: ctx, uid -func (_m *Store) SessionDeleteActives(ctx context.Context, uid models.UID) error { - ret := _m.Called(ctx, uid) +// SessionCreate provides a mock function with given fields: ctx, session +func (_m *Store) SessionCreate(ctx context.Context, session models.Session) (string, error) { + ret := _m.Called(ctx, session) - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, models.UID) error); ok { - r0 = rf(ctx, uid) - } else { - r0 = ret.Error(0) + if len(ret) == 0 { + panic("no return value specified for SessionCreate") } - return r0 -} - -// SessionDeleteRecordFrame provides a mock function with given fields: ctx, uid -func (_m *Store) SessionDeleteRecordFrame(ctx context.Context, uid models.UID) error { - ret := _m.Called(ctx, uid) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, models.UID) error); ok { - r0 = rf(ctx, uid) + var r0 string + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, models.Session) (string, error)); ok { + return rf(ctx, session) + } + if rf, ok := ret.Get(0).(func(context.Context, models.Session) string); ok { + r0 = rf(ctx, session) } else { - r0 = ret.Error(0) + r0 = ret.Get(0).(string) } - return r0 -} - -// SessionDeleteRecordFrameByDate provides a mock function with given fields: ctx, lte -func (_m *Store) SessionDeleteRecordFrameByDate(ctx context.Context, lte time.Time) (int64, int64, error) { - ret := _m.Called(ctx, lte) - - var r0 int64 - var r1 int64 - var r2 error - if rf, ok := ret.Get(0).(func(context.Context, time.Time) (int64, int64, error)); ok { - return rf(ctx, lte) - } - if rf, ok := ret.Get(0).(func(context.Context, time.Time) int64); ok { - r0 = rf(ctx, lte) + if rf, ok 
:= ret.Get(1).(func(context.Context, models.Session) error); ok { + r1 = rf(ctx, session) } else { - r0 = ret.Get(0).(int64) + r1 = ret.Error(1) } - if rf, ok := ret.Get(1).(func(context.Context, time.Time) int64); ok { - r1 = rf(ctx, lte) - } else { - r1 = ret.Get(1).(int64) + return r0, r1 +} + +// SessionEventsCreate provides a mock function with given fields: ctx, event +func (_m *Store) SessionEventsCreate(ctx context.Context, event *models.SessionEvent) error { + ret := _m.Called(ctx, event) + + if len(ret) == 0 { + panic("no return value specified for SessionEventsCreate") } - if rf, ok := ret.Get(2).(func(context.Context, time.Time) error); ok { - r2 = rf(ctx, lte) + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *models.SessionEvent) error); ok { + r0 = rf(ctx, event) } else { - r2 = ret.Error(2) + r0 = ret.Error(0) } - return r0, r1, r2 + return r0 } -// SessionGet provides a mock function with given fields: ctx, uid -func (_m *Store) SessionGet(ctx context.Context, uid models.UID) (*models.Session, error) { - ret := _m.Called(ctx, uid) +// SessionEventsDelete provides a mock function with given fields: ctx, uid, seat, event +func (_m *Store) SessionEventsDelete(ctx context.Context, uid models.UID, seat int, event models.SessionEventType) error { + ret := _m.Called(ctx, uid, seat, event) - var r0 *models.Session - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, models.UID) (*models.Session, error)); ok { - return rf(ctx, uid) - } - if rf, ok := ret.Get(0).(func(context.Context, models.UID) *models.Session); ok { - r0 = rf(ctx, uid) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.Session) - } + if len(ret) == 0 { + panic("no return value specified for SessionEventsDelete") } - if rf, ok := ret.Get(1).(func(context.Context, models.UID) error); ok { - r1 = rf(ctx, uid) + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, models.UID, int, models.SessionEventType) error); ok { + r0 = rf(ctx, uid, seat, 
event) } else { - r1 = ret.Error(1) + r0 = ret.Error(0) } - return r0, r1 + return r0 } -// SessionGetRecordFrame provides a mock function with given fields: ctx, uid -func (_m *Store) SessionGetRecordFrame(ctx context.Context, uid models.UID) ([]models.RecordedSession, int, error) { - ret := _m.Called(ctx, uid) +// SessionEventsList provides a mock function with given fields: ctx, uid, seat, event, opts +func (_m *Store) SessionEventsList(ctx context.Context, uid models.UID, seat int, event models.SessionEventType, opts ...store.QueryOption) ([]models.SessionEvent, int, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, uid, seat, event) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for SessionEventsList") + } - var r0 []models.RecordedSession + var r0 []models.SessionEvent var r1 int var r2 error - if rf, ok := ret.Get(0).(func(context.Context, models.UID) ([]models.RecordedSession, int, error)); ok { - return rf(ctx, uid) + if rf, ok := ret.Get(0).(func(context.Context, models.UID, int, models.SessionEventType, ...store.QueryOption) ([]models.SessionEvent, int, error)); ok { + return rf(ctx, uid, seat, event, opts...) } - if rf, ok := ret.Get(0).(func(context.Context, models.UID) []models.RecordedSession); ok { - r0 = rf(ctx, uid) + if rf, ok := ret.Get(0).(func(context.Context, models.UID, int, models.SessionEventType, ...store.QueryOption) []models.SessionEvent); ok { + r0 = rf(ctx, uid, seat, event, opts...) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).([]models.RecordedSession) + r0 = ret.Get(0).([]models.SessionEvent) } } - if rf, ok := ret.Get(1).(func(context.Context, models.UID) int); ok { - r1 = rf(ctx, uid) + if rf, ok := ret.Get(1).(func(context.Context, models.UID, int, models.SessionEventType, ...store.QueryOption) int); ok { + r1 = rf(ctx, uid, seat, event, opts...) 
} else { r1 = ret.Get(1).(int) } - if rf, ok := ret.Get(2).(func(context.Context, models.UID) error); ok { - r2 = rf(ctx, uid) + if rf, ok := ret.Get(2).(func(context.Context, models.UID, int, models.SessionEventType, ...store.QueryOption) error); ok { + r2 = rf(ctx, uid, seat, event, opts...) } else { r2 = ret.Error(2) } @@ -1935,32 +1244,43 @@ func (_m *Store) SessionGetRecordFrame(ctx context.Context, uid models.UID) ([]m return r0, r1, r2 } -// SessionList provides a mock function with given fields: ctx, pagination -func (_m *Store) SessionList(ctx context.Context, pagination paginator.Query) ([]models.Session, int, error) { - ret := _m.Called(ctx, pagination) +// SessionList provides a mock function with given fields: ctx, opts +func (_m *Store) SessionList(ctx context.Context, opts ...store.QueryOption) ([]models.Session, int, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for SessionList") + } var r0 []models.Session var r1 int var r2 error - if rf, ok := ret.Get(0).(func(context.Context, paginator.Query) ([]models.Session, int, error)); ok { - return rf(ctx, pagination) + if rf, ok := ret.Get(0).(func(context.Context, ...store.QueryOption) ([]models.Session, int, error)); ok { + return rf(ctx, opts...) } - if rf, ok := ret.Get(0).(func(context.Context, paginator.Query) []models.Session); ok { - r0 = rf(ctx, pagination) + if rf, ok := ret.Get(0).(func(context.Context, ...store.QueryOption) []models.Session); ok { + r0 = rf(ctx, opts...) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]models.Session) } } - if rf, ok := ret.Get(1).(func(context.Context, paginator.Query) int); ok { - r1 = rf(ctx, pagination) + if rf, ok := ret.Get(1).(func(context.Context, ...store.QueryOption) int); ok { + r1 = rf(ctx, opts...) 
} else { r1 = ret.Get(1).(int) } - if rf, ok := ret.Get(2).(func(context.Context, paginator.Query) error); ok { - r2 = rf(ctx, pagination) + if rf, ok := ret.Get(2).(func(context.Context, ...store.QueryOption) error); ok { + r2 = rf(ctx, opts...) } else { r2 = ret.Error(2) } @@ -1968,41 +1288,54 @@ func (_m *Store) SessionList(ctx context.Context, pagination paginator.Query) ([ return r0, r1, r2 } -// SessionSetAuthenticated provides a mock function with given fields: ctx, uid, authenticated -func (_m *Store) SessionSetAuthenticated(ctx context.Context, uid models.UID, authenticated bool) error { - ret := _m.Called(ctx, uid, authenticated) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, models.UID, bool) error); ok { - r0 = rf(ctx, uid, authenticated) - } else { - r0 = ret.Error(0) +// SessionResolve provides a mock function with given fields: ctx, resolver, value, opts +func (_m *Store) SessionResolve(ctx context.Context, resolver store.SessionResolver, value string, opts ...store.QueryOption) (*models.Session, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] } + var _ca []interface{} + _ca = append(_ca, ctx, resolver, value) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) - return r0 -} + if len(ret) == 0 { + panic("no return value specified for SessionResolve") + } -// SessionSetLastSeen provides a mock function with given fields: ctx, uid -func (_m *Store) SessionSetLastSeen(ctx context.Context, uid models.UID) error { - ret := _m.Called(ctx, uid) + var r0 *models.Session + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, store.SessionResolver, string, ...store.QueryOption) (*models.Session, error)); ok { + return rf(ctx, resolver, value, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, store.SessionResolver, string, ...store.QueryOption) *models.Session); ok { + r0 = rf(ctx, resolver, value, opts...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Session) + } + } - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, models.UID) error); ok { - r0 = rf(ctx, uid) + if rf, ok := ret.Get(1).(func(context.Context, store.SessionResolver, string, ...store.QueryOption) error); ok { + r1 = rf(ctx, resolver, value, opts...) } else { - r0 = ret.Error(0) + r1 = ret.Error(1) } - return r0 + return r0, r1 } -// SessionSetRecorded provides a mock function with given fields: ctx, uid, recorded -func (_m *Store) SessionSetRecorded(ctx context.Context, uid models.UID, recorded bool) error { - ret := _m.Called(ctx, uid, recorded) +// SessionUpdate provides a mock function with given fields: ctx, session +func (_m *Store) SessionUpdate(ctx context.Context, session *models.Session) error { + ret := _m.Called(ctx, session) + + if len(ret) == 0 { + panic("no return value specified for SessionUpdate") + } var r0 error - if rf, ok := ret.Get(0).(func(context.Context, models.UID, bool) error); ok { - r0 = rf(ctx, uid, recorded) + if rf, ok := ret.Get(0).(func(context.Context, *models.Session) error); ok { + r0 = rf(ctx, session) } else { r0 = ret.Error(0) } @@ -2014,6 +1347,10 @@ func (_m *Store) SessionSetRecorded(ctx context.Context, uid models.UID, recorde func (_m *Store) SessionUpdateDeviceUID(ctx context.Context, oldUID models.UID, newUID models.UID) error { ret := _m.Called(ctx, oldUID, newUID) + if len(ret) == 0 { + panic("no return value specified for SessionUpdateDeviceUID") + } + var r0 error if rf, ok := ret.Get(0).(func(context.Context, models.UID, models.UID) error); ok { r0 = rf(ctx, oldUID, newUID) @@ -2024,23 +1361,29 @@ func (_m *Store) SessionUpdateDeviceUID(ctx context.Context, oldUID models.UID, return r0 } -// TagsDelete provides a mock function with given fields: ctx, tenant, tag -func (_m *Store) TagsDelete(ctx context.Context, tenant string, tag string) (int64, error) { - ret := _m.Called(ctx, tenant, tag) +// SystemGet provides a mock 
function with given fields: ctx +func (_m *Store) SystemGet(ctx context.Context) (*models.System, error) { + ret := _m.Called(ctx) - var r0 int64 + if len(ret) == 0 { + panic("no return value specified for SystemGet") + } + + var r0 *models.System var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, string) (int64, error)); ok { - return rf(ctx, tenant, tag) + if rf, ok := ret.Get(0).(func(context.Context) (*models.System, error)); ok { + return rf(ctx) } - if rf, ok := ret.Get(0).(func(context.Context, string, string) int64); ok { - r0 = rf(ctx, tenant, tag) + if rf, ok := ret.Get(0).(func(context.Context) *models.System); ok { + r0 = rf(ctx) } else { - r0 = ret.Get(0).(int64) + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.System) + } } - if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok { - r1 = rf(ctx, tenant, tag) + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(ctx) } else { r1 = ret.Error(1) } @@ -2048,32 +1391,54 @@ func (_m *Store) TagsDelete(ctx context.Context, tenant string, tag string) (int return r0, r1 } -// TagsGet provides a mock function with given fields: ctx, tenant -func (_m *Store) TagsGet(ctx context.Context, tenant string) ([]string, int, error) { - ret := _m.Called(ctx, tenant) +// SystemSet provides a mock function with given fields: ctx, system +func (_m *Store) SystemSet(ctx context.Context, system *models.System) error { + ret := _m.Called(ctx, system) + + if len(ret) == 0 { + panic("no return value specified for SystemSet") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *models.System) error); ok { + r0 = rf(ctx, system) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// TagConflicts provides a mock function with given fields: ctx, tenantID, target +func (_m *Store) TagConflicts(ctx context.Context, tenantID string, target *models.TagConflicts) ([]string, bool, error) { + ret := _m.Called(ctx, tenantID, target) + + if len(ret) == 0 { + 
panic("no return value specified for TagConflicts") + } var r0 []string - var r1 int + var r1 bool var r2 error - if rf, ok := ret.Get(0).(func(context.Context, string) ([]string, int, error)); ok { - return rf(ctx, tenant) + if rf, ok := ret.Get(0).(func(context.Context, string, *models.TagConflicts) ([]string, bool, error)); ok { + return rf(ctx, tenantID, target) } - if rf, ok := ret.Get(0).(func(context.Context, string) []string); ok { - r0 = rf(ctx, tenant) + if rf, ok := ret.Get(0).(func(context.Context, string, *models.TagConflicts) []string); ok { + r0 = rf(ctx, tenantID, target) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]string) } } - if rf, ok := ret.Get(1).(func(context.Context, string) int); ok { - r1 = rf(ctx, tenant) + if rf, ok := ret.Get(1).(func(context.Context, string, *models.TagConflicts) bool); ok { + r1 = rf(ctx, tenantID, target) } else { - r1 = ret.Get(1).(int) + r1 = ret.Get(1).(bool) } - if rf, ok := ret.Get(2).(func(context.Context, string) error); ok { - r2 = rf(ctx, tenant) + if rf, ok := ret.Get(2).(func(context.Context, string, *models.TagConflicts) error); ok { + r2 = rf(ctx, tenantID, target) } else { r2 = ret.Error(2) } @@ -2081,23 +1446,27 @@ func (_m *Store) TagsGet(ctx context.Context, tenant string) ([]string, int, err return r0, r1, r2 } -// TagsRename provides a mock function with given fields: ctx, tenant, oldTag, newTag -func (_m *Store) TagsRename(ctx context.Context, tenant string, oldTag string, newTag string) (int64, error) { - ret := _m.Called(ctx, tenant, oldTag, newTag) +// TagCreate provides a mock function with given fields: ctx, tag +func (_m *Store) TagCreate(ctx context.Context, tag *models.Tag) (string, error) { + ret := _m.Called(ctx, tag) - var r0 int64 + if len(ret) == 0 { + panic("no return value specified for TagCreate") + } + + var r0 string var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, string, string) (int64, error)); ok { - return rf(ctx, tenant, oldTag, newTag) + if rf, 
ok := ret.Get(0).(func(context.Context, *models.Tag) (string, error)); ok { + return rf(ctx, tag) } - if rf, ok := ret.Get(0).(func(context.Context, string, string, string) int64); ok { - r0 = rf(ctx, tenant, oldTag, newTag) + if rf, ok := ret.Get(0).(func(context.Context, *models.Tag) string); ok { + r0 = rf(ctx, tag) } else { - r0 = ret.Get(0).(int64) + r0 = ret.Get(0).(string) } - if rf, ok := ret.Get(1).(func(context.Context, string, string, string) error); ok { - r1 = rf(ctx, tenant, oldTag, newTag) + if rf, ok := ret.Get(1).(func(context.Context, *models.Tag) error); ok { + r1 = rf(ctx, tag) } else { r1 = ret.Error(1) } @@ -2105,13 +1474,17 @@ func (_m *Store) TagsRename(ctx context.Context, tenant string, oldTag string, n return r0, r1 } -// UpdateCodes provides a mock function with given fields: ctx, id, codes -func (_m *Store) UpdateCodes(ctx context.Context, id string, codes []string) error { - ret := _m.Called(ctx, id, codes) +// TagDelete provides a mock function with given fields: ctx, tag +func (_m *Store) TagDelete(ctx context.Context, tag *models.Tag) error { + ret := _m.Called(ctx, tag) + + if len(ret) == 0 { + panic("no return value specified for TagDelete") + } var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, []string) error); ok { - r0 = rf(ctx, id, codes) + if rf, ok := ret.Get(0).(func(context.Context, *models.Tag) error); ok { + r0 = rf(ctx, tag) } else { r0 = ret.Error(0) } @@ -2119,41 +1492,68 @@ func (_m *Store) UpdateCodes(ctx context.Context, id string, codes []string) err return r0 } -// UserCreate provides a mock function with given fields: ctx, user -func (_m *Store) UserCreate(ctx context.Context, user *models.User) error { - ret := _m.Called(ctx, user) +// TagList provides a mock function with given fields: ctx, opts +func (_m *Store) TagList(ctx context.Context, opts ...store.QueryOption) ([]models.Tag, int, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var 
_ca []interface{} + _ca = append(_ca, ctx) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.User) error); ok { - r0 = rf(ctx, user) - } else { - r0 = ret.Error(0) + if len(ret) == 0 { + panic("no return value specified for TagList") } - return r0 -} + var r0 []models.Tag + var r1 int + var r2 error + if rf, ok := ret.Get(0).(func(context.Context, ...store.QueryOption) ([]models.Tag, int, error)); ok { + return rf(ctx, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, ...store.QueryOption) []models.Tag); ok { + r0 = rf(ctx, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.Tag) + } + } -// UserCreateToken provides a mock function with given fields: ctx, token -func (_m *Store) UserCreateToken(ctx context.Context, token *models.UserTokenRecover) error { - ret := _m.Called(ctx, token) + if rf, ok := ret.Get(1).(func(context.Context, ...store.QueryOption) int); ok { + r1 = rf(ctx, opts...) + } else { + r1 = ret.Get(1).(int) + } - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.UserTokenRecover) error); ok { - r0 = rf(ctx, token) + if rf, ok := ret.Get(2).(func(context.Context, ...store.QueryOption) error); ok { + r2 = rf(ctx, opts...) } else { - r0 = ret.Error(0) + r2 = ret.Error(2) } - return r0 + return r0, r1, r2 } -// UserDelete provides a mock function with given fields: ctx, id -func (_m *Store) UserDelete(ctx context.Context, id string) error { - ret := _m.Called(ctx, id) +// TagPullFromTarget provides a mock function with given fields: ctx, id, target, targetIDs +func (_m *Store) TagPullFromTarget(ctx context.Context, id string, target store.TagTarget, targetIDs ...string) error { + _va := make([]interface{}, len(targetIDs)) + for _i := range targetIDs { + _va[_i] = targetIDs[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, id, target) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) 
+ + if len(ret) == 0 { + panic("no return value specified for TagPullFromTarget") + } var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { - r0 = rf(ctx, id) + if rf, ok := ret.Get(0).(func(context.Context, string, store.TagTarget, ...string) error); ok { + r0 = rf(ctx, id, target, targetIDs...) } else { r0 = ret.Error(0) } @@ -2161,13 +1561,17 @@ func (_m *Store) UserDelete(ctx context.Context, id string) error { return r0 } -// UserDeleteTokens provides a mock function with given fields: ctx, id -func (_m *Store) UserDeleteTokens(ctx context.Context, id string) error { - ret := _m.Called(ctx, id) +// TagPushToTarget provides a mock function with given fields: ctx, id, target, targetID +func (_m *Store) TagPushToTarget(ctx context.Context, id string, target store.TagTarget, targetID string) error { + ret := _m.Called(ctx, id, target, targetID) + + if len(ret) == 0 { + panic("no return value specified for TagPushToTarget") + } var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { - r0 = rf(ctx, id) + if rf, ok := ret.Get(0).(func(context.Context, string, store.TagTarget, string) error); ok { + r0 = rf(ctx, id, target, targetID) } else { r0 = ret.Error(0) } @@ -2175,25 +1579,36 @@ func (_m *Store) UserDeleteTokens(ctx context.Context, id string) error { return r0 } -// UserDetachInfo provides a mock function with given fields: ctx, id -func (_m *Store) UserDetachInfo(ctx context.Context, id string) (map[string][]*models.Namespace, error) { - ret := _m.Called(ctx, id) +// TagResolve provides a mock function with given fields: ctx, resolver, value, opts +func (_m *Store) TagResolve(ctx context.Context, resolver store.TagResolver, value string, opts ...store.QueryOption) (*models.Tag, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, resolver, value) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) 
+ + if len(ret) == 0 { + panic("no return value specified for TagResolve") + } - var r0 map[string][]*models.Namespace + var r0 *models.Tag var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) (map[string][]*models.Namespace, error)); ok { - return rf(ctx, id) + if rf, ok := ret.Get(0).(func(context.Context, store.TagResolver, string, ...store.QueryOption) (*models.Tag, error)); ok { + return rf(ctx, resolver, value, opts...) } - if rf, ok := ret.Get(0).(func(context.Context, string) map[string][]*models.Namespace); ok { - r0 = rf(ctx, id) + if rf, ok := ret.Get(0).(func(context.Context, store.TagResolver, string, ...store.QueryOption) *models.Tag); ok { + r0 = rf(ctx, resolver, value, opts...) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).(map[string][]*models.Namespace) + r0 = ret.Get(0).(*models.Tag) } } - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, id) + if rf, ok := ret.Get(1).(func(context.Context, store.TagResolver, string, ...store.QueryOption) error); ok { + r1 = rf(ctx, resolver, value, opts...) 
} else { r1 = ret.Error(1) } @@ -2201,58 +1616,72 @@ func (_m *Store) UserDetachInfo(ctx context.Context, id string) (map[string][]*m return r0, r1 } -// UserGetByEmail provides a mock function with given fields: ctx, email -func (_m *Store) UserGetByEmail(ctx context.Context, email string) (*models.User, error) { - ret := _m.Called(ctx, email) +// TagUpdate provides a mock function with given fields: ctx, tag +func (_m *Store) TagUpdate(ctx context.Context, tag *models.Tag) error { + ret := _m.Called(ctx, tag) - var r0 *models.User - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) (*models.User, error)); ok { - return rf(ctx, email) + if len(ret) == 0 { + panic("no return value specified for TagUpdate") } - if rf, ok := ret.Get(0).(func(context.Context, string) *models.User); ok { - r0 = rf(ctx, email) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *models.Tag) error); ok { + r0 = rf(ctx, tag) } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.User) - } + r0 = ret.Error(0) } - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, email) + return r0 +} + +// TunnelUpdateDeviceUID provides a mock function with given fields: ctx, tenantID, oldUID, newUID +func (_m *Store) TunnelUpdateDeviceUID(ctx context.Context, tenantID string, oldUID string, newUID string) error { + ret := _m.Called(ctx, tenantID, oldUID, newUID) + + if len(ret) == 0 { + panic("no return value specified for TunnelUpdateDeviceUID") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, string, string, string) error); ok { + r0 = rf(ctx, tenantID, oldUID, newUID) } else { - r1 = ret.Error(1) + r0 = ret.Error(0) } - return r0, r1 + return r0 } -// UserGetByID provides a mock function with given fields: ctx, id, ns -func (_m *Store) UserGetByID(ctx context.Context, id string, ns bool) (*models.User, int, error) { - ret := _m.Called(ctx, id, ns) +// UserConflicts provides a mock function with given fields: 
ctx, target +func (_m *Store) UserConflicts(ctx context.Context, target *models.UserConflicts) ([]string, bool, error) { + ret := _m.Called(ctx, target) - var r0 *models.User - var r1 int + if len(ret) == 0 { + panic("no return value specified for UserConflicts") + } + + var r0 []string + var r1 bool var r2 error - if rf, ok := ret.Get(0).(func(context.Context, string, bool) (*models.User, int, error)); ok { - return rf(ctx, id, ns) + if rf, ok := ret.Get(0).(func(context.Context, *models.UserConflicts) ([]string, bool, error)); ok { + return rf(ctx, target) } - if rf, ok := ret.Get(0).(func(context.Context, string, bool) *models.User); ok { - r0 = rf(ctx, id, ns) + if rf, ok := ret.Get(0).(func(context.Context, *models.UserConflicts) []string); ok { + r0 = rf(ctx, target) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.User) + r0 = ret.Get(0).([]string) } } - if rf, ok := ret.Get(1).(func(context.Context, string, bool) int); ok { - r1 = rf(ctx, id, ns) + if rf, ok := ret.Get(1).(func(context.Context, *models.UserConflicts) bool); ok { + r1 = rf(ctx, target) } else { - r1 = ret.Get(1).(int) + r1 = ret.Get(1).(bool) } - if rf, ok := ret.Get(2).(func(context.Context, string, bool) error); ok { - r2 = rf(ctx, id, ns) + if rf, ok := ret.Get(2).(func(context.Context, *models.UserConflicts) error); ok { + r2 = rf(ctx, target) } else { r2 = ret.Error(2) } @@ -2260,25 +1689,27 @@ func (_m *Store) UserGetByID(ctx context.Context, id string, ns bool) (*models.U return r0, r1, r2 } -// UserGetByUsername provides a mock function with given fields: ctx, username -func (_m *Store) UserGetByUsername(ctx context.Context, username string) (*models.User, error) { - ret := _m.Called(ctx, username) +// UserCreate provides a mock function with given fields: ctx, user +func (_m *Store) UserCreate(ctx context.Context, user *models.User) (string, error) { + ret := _m.Called(ctx, user) - var r0 *models.User + if len(ret) == 0 { + panic("no return value specified for UserCreate") 
+ } + + var r0 string var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) (*models.User, error)); ok { - return rf(ctx, username) + if rf, ok := ret.Get(0).(func(context.Context, *models.User) (string, error)); ok { + return rf(ctx, user) } - if rf, ok := ret.Get(0).(func(context.Context, string) *models.User); ok { - r0 = rf(ctx, username) + if rf, ok := ret.Get(0).(func(context.Context, *models.User) string); ok { + r0 = rf(ctx, user) } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.User) - } + r0 = ret.Get(0).(string) } - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, username) + if rf, ok := ret.Get(1).(func(context.Context, *models.User) error); ok { + r1 = rf(ctx, user) } else { r1 = ret.Error(1) } @@ -2286,20 +1717,42 @@ func (_m *Store) UserGetByUsername(ctx context.Context, username string) (*model return r0, r1 } -// UserGetToken provides a mock function with given fields: ctx, id -func (_m *Store) UserGetToken(ctx context.Context, id string) (*models.UserTokenRecover, error) { +// UserDelete provides a mock function with given fields: ctx, user +func (_m *Store) UserDelete(ctx context.Context, user *models.User) error { + ret := _m.Called(ctx, user) + + if len(ret) == 0 { + panic("no return value specified for UserDelete") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *models.User) error); ok { + r0 = rf(ctx, user) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// UserGetInfo provides a mock function with given fields: ctx, id +func (_m *Store) UserGetInfo(ctx context.Context, id string) (*models.UserInfo, error) { ret := _m.Called(ctx, id) - var r0 *models.UserTokenRecover + if len(ret) == 0 { + panic("no return value specified for UserGetInfo") + } + + var r0 *models.UserInfo var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) (*models.UserTokenRecover, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, string) 
(*models.UserInfo, error)); ok { return rf(ctx, id) } - if rf, ok := ret.Get(0).(func(context.Context, string) *models.UserTokenRecover); ok { + if rf, ok := ret.Get(0).(func(context.Context, string) *models.UserInfo); ok { r0 = rf(ctx, id) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.UserTokenRecover) + r0 = ret.Get(0).(*models.UserInfo) } } @@ -2312,32 +1765,71 @@ func (_m *Store) UserGetToken(ctx context.Context, id string) (*models.UserToken return r0, r1 } -// UserList provides a mock function with given fields: ctx, pagination, filters -func (_m *Store) UserList(ctx context.Context, pagination paginator.Query, filters []models.Filter) ([]models.User, int, error) { - ret := _m.Called(ctx, pagination, filters) +// UserInvitationsUpsert provides a mock function with given fields: ctx, email +func (_m *Store) UserInvitationsUpsert(ctx context.Context, email string) (string, error) { + ret := _m.Called(ctx, email) + + if len(ret) == 0 { + panic("no return value specified for UserInvitationsUpsert") + } + + var r0 string + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string) (string, error)); ok { + return rf(ctx, email) + } + if rf, ok := ret.Get(0).(func(context.Context, string) string); ok { + r0 = rf(ctx, email) + } else { + r0 = ret.Get(0).(string) + } + + if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { + r1 = rf(ctx, email) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UserList provides a mock function with given fields: ctx, opts +func (_m *Store) UserList(ctx context.Context, opts ...store.QueryOption) ([]models.User, int, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) 
+ + if len(ret) == 0 { + panic("no return value specified for UserList") + } var r0 []models.User var r1 int var r2 error - if rf, ok := ret.Get(0).(func(context.Context, paginator.Query, []models.Filter) ([]models.User, int, error)); ok { - return rf(ctx, pagination, filters) + if rf, ok := ret.Get(0).(func(context.Context, ...store.QueryOption) ([]models.User, int, error)); ok { + return rf(ctx, opts...) } - if rf, ok := ret.Get(0).(func(context.Context, paginator.Query, []models.Filter) []models.User); ok { - r0 = rf(ctx, pagination, filters) + if rf, ok := ret.Get(0).(func(context.Context, ...store.QueryOption) []models.User); ok { + r0 = rf(ctx, opts...) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]models.User) } } - if rf, ok := ret.Get(1).(func(context.Context, paginator.Query, []models.Filter) int); ok { - r1 = rf(ctx, pagination, filters) + if rf, ok := ret.Get(1).(func(context.Context, ...store.QueryOption) int); ok { + r1 = rf(ctx, opts...) } else { r1 = ret.Get(1).(int) } - if rf, ok := ret.Get(2).(func(context.Context, paginator.Query, []models.Filter) error); ok { - r2 = rf(ctx, pagination, filters) + if rf, ok := ret.Get(2).(func(context.Context, ...store.QueryOption) error); ok { + r2 = rf(ctx, opts...) 
} else { r2 = ret.Error(2) } @@ -2345,41 +1837,54 @@ func (_m *Store) UserList(ctx context.Context, pagination paginator.Query, filte return r0, r1, r2 } -// UserUpdateAccountStatus provides a mock function with given fields: ctx, id -func (_m *Store) UserUpdateAccountStatus(ctx context.Context, id string) error { - ret := _m.Called(ctx, id) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { - r0 = rf(ctx, id) - } else { - r0 = ret.Error(0) +// UserResolve provides a mock function with given fields: ctx, resolver, value, opts +func (_m *Store) UserResolve(ctx context.Context, resolver store.UserResolver, value string, opts ...store.QueryOption) (*models.User, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] } + var _ca []interface{} + _ca = append(_ca, ctx, resolver, value) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) - return r0 -} + if len(ret) == 0 { + panic("no return value specified for UserResolve") + } -// UserUpdateData provides a mock function with given fields: ctx, id, user -func (_m *Store) UserUpdateData(ctx context.Context, id string, user models.User) error { - ret := _m.Called(ctx, id, user) + var r0 *models.User + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, store.UserResolver, string, ...store.QueryOption) (*models.User, error)); ok { + return rf(ctx, resolver, value, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, store.UserResolver, string, ...store.QueryOption) *models.User); ok { + r0 = rf(ctx, resolver, value, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.User) + } + } - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, models.User) error); ok { - r0 = rf(ctx, id, user) + if rf, ok := ret.Get(1).(func(context.Context, store.UserResolver, string, ...store.QueryOption) error); ok { + r1 = rf(ctx, resolver, value, opts...) 
} else { - r0 = ret.Error(0) + r1 = ret.Error(1) } - return r0 + return r0, r1 } -// UserUpdateFromAdmin provides a mock function with given fields: ctx, name, username, email, password, id -func (_m *Store) UserUpdateFromAdmin(ctx context.Context, name string, username string, email string, password string, id string) error { - ret := _m.Called(ctx, name, username, email, password, id) +// UserUpdate provides a mock function with given fields: ctx, user +func (_m *Store) UserUpdate(ctx context.Context, user *models.User) error { + ret := _m.Called(ctx, user) + + if len(ret) == 0 { + panic("no return value specified for UserUpdate") + } var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, string) error); ok { - r0 = rf(ctx, name, username, email, password, id) + if rf, ok := ret.Get(0).(func(context.Context, *models.User) error); ok { + r0 = rf(ctx, user) } else { r0 = ret.Error(0) } @@ -2387,13 +1892,17 @@ func (_m *Store) UserUpdateFromAdmin(ctx context.Context, name string, username return r0 } -// UserUpdatePassword provides a mock function with given fields: ctx, newPassword, id -func (_m *Store) UserUpdatePassword(ctx context.Context, newPassword string, id string) error { - ret := _m.Called(ctx, newPassword, id) +// WithTransaction provides a mock function with given fields: ctx, cb +func (_m *Store) WithTransaction(ctx context.Context, cb store.TransactionCb) error { + ret := _m.Called(ctx, cb) + + if len(ret) == 0 { + panic("no return value specified for WithTransaction") + } var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, string) error); ok { - r0 = rf(ctx, newPassword, id) + if rf, ok := ret.Get(0).(func(context.Context, store.TransactionCb) error); ok { + r0 = rf(ctx, cb) } else { r0 = ret.Error(0) } diff --git a/api/store/mongo/announcement.go b/api/store/mongo/announcement.go deleted file mode 100644 index 01c90dc39d6..00000000000 --- a/api/store/mongo/announcement.go +++ /dev/null @@ 
-1,82 +0,0 @@ -package mongo - -import ( - "context" - - "github.com/shellhub-io/shellhub/api/store" - "github.com/shellhub-io/shellhub/api/store/mongo/queries" - "github.com/shellhub-io/shellhub/pkg/api/order" - "github.com/shellhub-io/shellhub/pkg/api/paginator" - "github.com/shellhub-io/shellhub/pkg/models" - "go.mongodb.org/mongo-driver/bson" -) - -func (s *Store) AnnouncementList(ctx context.Context, pagination paginator.Query, order order.Query) ([]models.AnnouncementShort, int, error) { - query := []bson.M{} - - queryCount := append(query, bson.M{"$count": "count"}) - count, err := AggregateCount(ctx, s.db.Collection("announcements"), queryCount) - if err != nil { - return nil, 0, FromMongoError(err) - } - - query = append(query, queries.BuildOrderQuery(order, "date")...) - query = append(query, queries.BuildPaginationQuery(pagination)...) - - cursor, err := s.db.Collection("announcements").Aggregate(ctx, query) - if err != nil { - return nil, 0, FromMongoError(err) - } - - var announcements []models.AnnouncementShort - if err := cursor.All(ctx, &announcements); err != nil { - return nil, 0, FromMongoError(err) - } - - return announcements, count, nil -} - -func (s *Store) AnnouncementGet(ctx context.Context, uuid string) (*models.Announcement, error) { - ann := new(models.Announcement) - - err := s.db.Collection("announcements").FindOne(ctx, bson.M{"uuid": uuid}).Decode(&ann) - if err != nil { - return nil, FromMongoError(err) - } - - return ann, nil -} - -func (s *Store) AnnouncementCreate(ctx context.Context, announcement *models.Announcement) error { - if _, err := s.db.Collection("announcements").InsertOne(ctx, announcement); err != nil { - return FromMongoError(err) - } - - return nil -} - -func (s *Store) AnnouncementUpdate(ctx context.Context, announcement *models.Announcement) error { - result, err := s.db.Collection("announcements").UpdateOne(ctx, bson.M{"uuid": announcement.UUID}, bson.M{"$set": bson.M{"title": announcement.Title, "content": 
announcement.Content}}) - if err != nil { - return FromMongoError(err) - } - - if result.MatchedCount < 1 { - return store.ErrNoDocuments - } - - return nil -} - -func (s *Store) AnnouncementDelete(ctx context.Context, uuid string) error { - result, err := s.db.Collection("announcements").DeleteOne(ctx, bson.M{"uuid": uuid}) - if err != nil { - return FromMongoError(err) - } - - if result.DeletedCount < 1 { - return store.ErrNoDocuments - } - - return nil -} diff --git a/api/store/mongo/announcement_test.go b/api/store/mongo/announcement_test.go deleted file mode 100644 index ad8bbcd9772..00000000000 --- a/api/store/mongo/announcement_test.go +++ /dev/null @@ -1,320 +0,0 @@ -package mongo - -import ( - "context" - "testing" - "time" - - "github.com/shellhub-io/shellhub/api/pkg/dbtest" - "github.com/shellhub-io/shellhub/api/pkg/fixtures" - "github.com/shellhub-io/shellhub/api/store" - "github.com/shellhub-io/shellhub/pkg/api/order" - "github.com/shellhub-io/shellhub/pkg/api/paginator" - "github.com/shellhub-io/shellhub/pkg/cache" - "github.com/shellhub-io/shellhub/pkg/models" - "github.com/stretchr/testify/assert" -) - -func TestAnnouncementList(t *testing.T) { - type Expected struct { - ann []models.AnnouncementShort - len int - err error - } - - cases := []struct { - description string - page paginator.Query - order order.Query - fixtures []string - expected Expected - }{ - { - description: "succeeds when announcement list is empty", - page: paginator.Query{Page: -1, PerPage: -1}, - order: order.Query{OrderBy: order.Asc}, - fixtures: []string{}, - expected: Expected{ - ann: nil, - len: 0, - err: nil, - }, - }, - { - description: "succeeds when announcement list is not empty", - page: paginator.Query{Page: -1, PerPage: -1}, - order: order.Query{OrderBy: order.Asc}, - fixtures: []string{fixtures.FixtureAnnouncements}, - expected: Expected{ - ann: []models.AnnouncementShort{ - { - Date: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - UUID: 
"00000000-0000-4000-0000-000000000000", - Title: "title-0", - }, - { - Date: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), - UUID: "00000000-0000-4001-0000-000000000000", - Title: "title-1", - }, - { - Date: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - UUID: "00000000-0000-4002-0000-000000000000", - Title: "title-2", - }, - { - Date: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), - UUID: "00000000-0000-4003-0000-000000000000", - Title: "title-3", - }, - }, - len: 4, - err: nil, - }, - }, - { - description: "succeeds when announcement list is not empty and page and page size is limited", - page: paginator.Query{Page: 2, PerPage: 2}, - order: order.Query{OrderBy: order.Asc}, - fixtures: []string{fixtures.FixtureAnnouncements}, - expected: Expected{ - ann: []models.AnnouncementShort{ - { - Date: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - UUID: "00000000-0000-4002-0000-000000000000", - Title: "title-2", - }, - { - Date: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), - UUID: "00000000-0000-4003-0000-000000000000", - Title: "title-3", - }, - }, - len: 4, - err: nil, - }, - }, - { - description: "succeeds when announcement list is not empty and order is desc", - page: paginator.Query{Page: -1, PerPage: -1}, - order: order.Query{OrderBy: order.Desc}, - fixtures: []string{fixtures.FixtureAnnouncements}, - expected: Expected{ - ann: []models.AnnouncementShort{ - { - Date: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), - UUID: "00000000-0000-4003-0000-000000000000", - Title: "title-3", - }, - { - Date: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - UUID: "00000000-0000-4002-0000-000000000000", - Title: "title-2", - }, - { - Date: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), - UUID: "00000000-0000-4001-0000-000000000000", - Title: "title-1", - }, - { - Date: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - UUID: "00000000-0000-4000-0000-000000000000", - Title: "title-0", - }, - }, - len: 4, - err: nil, - }, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - 
mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - ann, count, err := mongostore.AnnouncementList(context.TODO(), tc.page, tc.order) - assert.Equal(t, tc.expected, Expected{ann: ann, len: count, err: err}) - }) - } -} - -func TestAnnouncementGet(t *testing.T) { - type Expected struct { - ann *models.Announcement - err error - } - - cases := []struct { - description string - uuid string - fixtures []string - expected Expected - }{ - { - description: "fails when announcement is not found", - uuid: "nonexistent", - fixtures: []string{fixtures.FixtureAnnouncements}, - expected: Expected{ - ann: nil, - err: store.ErrNoDocuments, - }, - }, - { - description: "succeeds when announcement is found", - uuid: "00000000-0000-4000-0000-000000000000", - fixtures: []string{fixtures.FixtureAnnouncements}, - expected: Expected{ - ann: &models.Announcement{ - Date: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - UUID: "00000000-0000-4000-0000-000000000000", - Title: "title-0", - Content: "content-0", - }, - err: nil, - }, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - ann, err := mongostore.AnnouncementGet(context.TODO(), tc.uuid) - assert.Equal(t, tc.expected, Expected{ann: ann, err: err}) - }) - } -} - -func TestAnnouncementCreate(t *testing.T) { - cases := []struct { - description string - announcement *models.Announcement - fixtures []string - expected error - }{ - { - description: "succeeds when data is valid", - announcement: &models.Announcement{ - 
UUID: "00000000-0000-40004-0000-000000000000", - Title: "title", - Content: "content", - }, - fixtures: []string{}, - expected: nil, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - store := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := store.AnnouncementCreate(context.TODO(), tc.announcement) - assert.Equal(t, tc.expected, err) - }) - } -} - -func TestAnnouncementUpdate(t *testing.T) { - cases := []struct { - description string - ann *models.Announcement - fixtures []string - expected error - }{ - { - description: "fails when announcement is not found", - ann: &models.Announcement{ - UUID: "nonexistent", - Title: "edited title", - Content: "edited content", - }, - fixtures: []string{fixtures.FixtureAnnouncements}, - expected: store.ErrNoDocuments, - }, - { - description: "succeeds when announcement is found", - ann: &models.Announcement{ - UUID: "00000000-0000-4000-0000-000000000000", - Title: "edited title", - Content: "edited content", - }, - fixtures: []string{fixtures.FixtureAnnouncements}, - expected: nil, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := mongostore.AnnouncementUpdate(context.TODO(), tc.ann) - assert.Equal(t, tc.expected, err) - }) - } -} - -func TestAnnouncementDelete(t *testing.T) { - cases := []struct { - description string - uuid string - fixtures []string - expected error - }{ - { - description: "fails when announcement is not found", - uuid: "nonexistent", - fixtures: 
[]string{fixtures.FixtureAnnouncements}, - expected: store.ErrNoDocuments, - }, - { - description: "succeeds when announcement is found", - uuid: "00000000-0000-4000-0000-000000000000", - fixtures: []string{fixtures.FixtureAnnouncements}, - expected: nil, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := mongostore.AnnouncementDelete(context.TODO(), tc.uuid) - assert.Equal(t, tc.expected, err) - }) - } -} diff --git a/api/store/mongo/api-key.go b/api/store/mongo/api-key.go new file mode 100644 index 00000000000..b48219b4477 --- /dev/null +++ b/api/store/mongo/api-key.go @@ -0,0 +1,158 @@ +package mongo + +import ( + "context" + + "github.com/shellhub-io/shellhub/api/store" + "github.com/shellhub-io/shellhub/pkg/clock" + "github.com/shellhub-io/shellhub/pkg/models" + "go.mongodb.org/mongo-driver/bson" +) + +func (s *Store) APIKeyCreate(ctx context.Context, apiKey *models.APIKey) (string, error) { + now := clock.Now() + apiKey.CreatedAt = now + apiKey.UpdatedAt = now + + res, err := s.db.Collection("api_keys").InsertOne(ctx, apiKey) + if err != nil { + return "", FromMongoError(err) + } + + return res.InsertedID.(string), nil +} + +func (s *Store) APIKeyConflicts(ctx context.Context, tenantID string, target *models.APIKeyConflicts) ([]string, bool, error) { + pipeline := []bson.M{ + { + "$match": bson.M{ + "tenant_id": tenantID, + "$or": []bson.M{ + {"_id": target.ID}, + {"name": target.Name}, + }, + }, + }, + } + + cursor, err := s.db.Collection("api_keys").Aggregate(ctx, pipeline) + if err != nil { + return nil, false, FromMongoError(err) + } + defer cursor.Close(ctx) + + apiKey := new(models.APIKeyConflicts) + conflicts := make([]string, 0) + for cursor.Next(ctx) { 
+ if err := cursor.Decode(&apiKey); err != nil { + return nil, false, FromMongoError(err) + } + + if apiKey.ID == target.ID { + conflicts = append(conflicts, "id") + } + + if apiKey.Name == target.Name { + conflicts = append(conflicts, "name") + } + } + + return conflicts, len(conflicts) > 0, nil +} + +func (s *Store) APIKeyResolve(ctx context.Context, resolver store.APIKeyResolver, value string, opts ...store.QueryOption) (*models.APIKey, error) { + query := []bson.M{} + switch resolver { + case store.APIKeyIDResolver: + query = append(query, bson.M{"$match": bson.M{"_id": value}}) + case store.APIKeyNameResolver: + query = append(query, bson.M{"$match": bson.M{"name": value}}) + } + + for _, opt := range opts { + if err := opt(context.WithValue(ctx, "query", &query)); err != nil { + return nil, err + } + } + + cursor, err := s.db.Collection("api_keys").Aggregate(ctx, query) + if err != nil { + return nil, FromMongoError(err) + } + defer cursor.Close(ctx) + + if !cursor.Next(ctx) { + return nil, store.ErrNoDocuments + } + + apiKey := new(models.APIKey) + if err := cursor.Decode(&apiKey); err != nil { + return nil, FromMongoError(err) + } + + return apiKey, nil +} + +func (s *Store) APIKeyList(ctx context.Context, opts ...store.QueryOption) ([]models.APIKey, int, error) { + query := []bson.M{} + for _, opt := range opts { + if err := opt(context.WithValue(ctx, "query", &query)); err != nil { + return nil, 0, err + } + } + + count, err := CountAllMatchingDocuments(ctx, s.db.Collection("api_keys"), query) + if err != nil { + return nil, 0, FromMongoError(err) + } + + if count == 0 { + return []models.APIKey{}, 0, nil + } + + cursor, err := s.db.Collection("api_keys").Aggregate(ctx, query) + if err != nil { + return nil, 0, FromMongoError(err) + } + defer cursor.Close(ctx) + + apiKeys := make([]models.APIKey, 0) + for cursor.Next(ctx) { + apiKey := new(models.APIKey) + if err := cursor.Decode(apiKey); err != nil { + return nil, 0, FromMongoError(err) + } + + apiKeys = 
append(apiKeys, *apiKey) + } + + return apiKeys, count, nil +} + +func (s *Store) APIKeyUpdate(ctx context.Context, apiKey *models.APIKey) error { + apiKey.UpdatedAt = clock.Now() + + res, err := s.db.Collection("api_keys").UpdateOne(ctx, bson.M{"_id": apiKey.ID}, bson.M{"$set": apiKey}) + if err != nil { + return FromMongoError(err) + } + + if res.ModifiedCount < 1 { + return store.ErrNoDocuments + } + + return nil +} + +func (s *Store) APIKeyDelete(ctx context.Context, apiKey *models.APIKey) error { + res, err := s.db.Collection("api_keys").DeleteOne(ctx, bson.M{"_id": apiKey.ID}) + if err != nil { + return FromMongoError(err) + } + + if res.DeletedCount < 1 { + return store.ErrNoDocuments + } + + return nil +} diff --git a/api/store/mongo/api-key_test.go b/api/store/mongo/api-key_test.go new file mode 100644 index 00000000000..68ed2283c7e --- /dev/null +++ b/api/store/mongo/api-key_test.go @@ -0,0 +1,477 @@ +package mongo_test + +import ( + "context" + "testing" + "time" + + "github.com/shellhub-io/shellhub/api/store" + "github.com/shellhub-io/shellhub/pkg/api/query" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/stretchr/testify/require" + "go.mongodb.org/mongo-driver/bson" +) + +func TestAPIKeyCreate(t *testing.T) { + type Expected struct { + insertedID string + err error + } + + cases := []struct { + description string + apiKey *models.APIKey + expected Expected + }{ + { + description: "succeeds", + apiKey: &models.APIKey{ + ID: "f23a2e56cd3fcfba002c72675c870e1e7813292adc40bbf14cea479a2e07976a", + Name: "dev", + CreatedBy: "507f1f77bcf86cd799439011", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "admin", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + ExpiresIn: 0, + }, + expected: Expected{ + insertedID: "f23a2e56cd3fcfba002c72675c870e1e7813292adc40bbf14cea479a2e07976a", + err: nil, + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t 
*testing.T) { + ctx := context.Background() + + insertedID, err := s.APIKeyCreate(ctx, tc.apiKey) + require.Equal(t, tc.expected, Expected{insertedID, err}) + }) + } +} + +func TestAPIKeyConflicts(t *testing.T) { + type Expected struct { + conflicts []string + ok bool + err error + } + + cases := []struct { + description string + tenantID string + target *models.APIKeyConflicts + fixtures []string + expected Expected + }{ + { + description: "no conflicts when target is empty", + tenantID: "00000000-0000-4000-0000-000000000000", + target: &models.APIKeyConflicts{}, + fixtures: []string{fixtureAPIKeys}, + expected: Expected{[]string{}, false, nil}, + }, + { + description: "no conflicts with non existing name", + tenantID: "00000000-0000-4000-0000-000000000000", + target: &models.APIKeyConflicts{Name: "nonexistent"}, + fixtures: []string{fixtureAPIKeys}, + expected: Expected{[]string{}, false, nil}, + }, + { + description: "no conflict detected with existing attribute but different tenant id", + tenantID: "nonexistent", + target: &models.APIKeyConflicts{Name: "dev"}, + fixtures: []string{fixtureAPIKeys}, + expected: Expected{[]string{}, false, nil}, + }, + { + description: "conflict detected with existing name", + tenantID: "00000000-0000-4000-0000-000000000000", + target: &models.APIKeyConflicts{Name: "dev"}, + fixtures: []string{fixtureAPIKeys}, + expected: Expected{[]string{"name"}, true, nil}, + }, + { + description: "conflict detected with existing id", + tenantID: "00000000-0000-4000-0000-000000000000", + target: &models.APIKeyConflicts{ID: "f23a2e56cd3fcfba002c72675c870e1e7813292adc40bbf14cea479a2e07976a"}, + fixtures: []string{fixtureAPIKeys}, + expected: Expected{[]string{"id"}, true, nil}, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + ctx := context.Background() + + require.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { require.NoError(t, srv.Reset()) }) + + conflicts, ok, err := s.APIKeyConflicts(ctx, 
tc.tenantID, tc.target) + require.Equal(t, tc.expected, Expected{conflicts, ok, err}) + }) + } +} + +func TestAPIKeyResolve(t *testing.T) { + type Expected struct { + apiKey *models.APIKey + err error + } + + cases := []struct { + description string + tenantID string + resolver store.APIKeyResolver + value string + fixtures []string + expected Expected + }{ + { + description: "fails when ID does not exist", + tenantID: "00000000-0000-4000-0000-000000000000", + resolver: store.APIKeyIDResolver, + value: "nonexistent-id", + fixtures: []string{fixtureAPIKeys}, + expected: Expected{ + apiKey: nil, + err: store.ErrNoDocuments, + }, + }, + { + description: "succeeds resolving API key by ID", + tenantID: "00000000-0000-4000-0000-000000000000", + resolver: store.APIKeyIDResolver, + value: "f23a2e56cd3fcfba002c72675c870e1e7813292adc40bbf14cea479a2e07976a", + fixtures: []string{fixtureAPIKeys}, + expected: Expected{ + apiKey: &models.APIKey{ + ID: "f23a2e56cd3fcfba002c72675c870e1e7813292adc40bbf14cea479a2e07976a", + Name: "dev", + CreatedBy: "507f1f77bcf86cd799439011", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "admin", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + ExpiresIn: 0, + }, + err: nil, + }, + }, + { + description: "fails when name and tenant ID does not exist", + tenantID: "nonexistent-tenant", + resolver: store.APIKeyNameResolver, + value: "nonexistent", + fixtures: []string{fixtureAPIKeys}, + expected: Expected{ + apiKey: nil, + err: store.ErrNoDocuments, + }, + }, + { + description: "fails when name is valid but tenant ID is not", + tenantID: "nonexistent-tenant", + resolver: store.APIKeyNameResolver, + value: "dev", + fixtures: []string{fixtureAPIKeys}, + expected: Expected{ + apiKey: nil, + err: store.ErrNoDocuments, + }, + }, + { + description: "fails when tenant ID is valid but name is not", + tenantID: "00000000-0000-4000-0000-000000000000", + resolver: 
store.APIKeyNameResolver, + value: "nonexistent", + fixtures: []string{fixtureAPIKeys}, + expected: Expected{ + apiKey: nil, + err: store.ErrNoDocuments, + }, + }, + { + description: "succeeds resolving API key by name", + tenantID: "00000000-0000-4000-0000-000000000000", + resolver: store.APIKeyNameResolver, + value: "dev", + fixtures: []string{fixtureAPIKeys}, + expected: Expected{ + apiKey: &models.APIKey{ + ID: "f23a2e56cd3fcfba002c72675c870e1e7813292adc40bbf14cea479a2e07976a", + Name: "dev", + CreatedBy: "507f1f77bcf86cd799439011", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "admin", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + ExpiresIn: 0, + }, + err: nil, + }, + }, + { + description: "fails when API key exists but belongs to different tenant", + tenantID: "11111111-1111-4111-1111-111111111111", + resolver: store.APIKeyNameResolver, + value: "dev", + fixtures: []string{fixtureAPIKeys}, + expected: Expected{ + apiKey: nil, + err: store.ErrNoDocuments, + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + ctx := context.Background() + + require.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { require.NoError(t, srv.Reset()) }) + + apiKey, err := s.APIKeyResolve(ctx, tc.resolver, tc.value, s.Options().InNamespace(tc.tenantID)) + require.Equal(t, tc.expected, Expected{apiKey: apiKey, err: err}) + }) + } +} + +func TestAPIKeyList(t *testing.T) { + type Expected struct { + apiKeys []models.APIKey + count int + err error + } + + cases := []struct { + description string + opts []store.QueryOption + fixtures []string + expected Expected + }{ + { + description: "succeeds when there are no api keys", + opts: []store.QueryOption{s.Options().InNamespace("non-existent"), s.Options().Sort(&query.Sorter{By: "expires_in", Order: query.OrderAsc}), s.Options().Paginate(&query.Paginator{Page: 1, PerPage: 10})}, + fixtures: 
[]string{fixtureAPIKeys}, + expected: Expected{ + apiKeys: []models.APIKey{}, + count: 0, + err: nil, + }, + }, + { + description: "succeeds when there are api keys", + opts: []store.QueryOption{s.Options().InNamespace("00000000-0000-4000-0000-000000000000"), s.Options().Sort(&query.Sorter{By: "expires_in", Order: query.OrderAsc}), s.Options().Paginate(&query.Paginator{Page: 1, PerPage: 10})}, + fixtures: []string{fixtureAPIKeys}, + expected: Expected{ + apiKeys: []models.APIKey{ + { + ID: "f23a2e56cd3fcfba002c72675c870e1e7813292adc40bbf14cea479a2e07976a", + Name: "dev", + CreatedBy: "507f1f77bcf86cd799439011", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "admin", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + ExpiresIn: 0, + }, + { + ID: "a1b2c73ea41f70870c035283336d72228118213ed03ec78043ffee48d827af11", + Name: "prod", + CreatedBy: "507f1f77bcf86cd799439011", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "operator", + CreatedAt: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), + ExpiresIn: 10, + }, + }, + count: 2, + err: nil, + }, + }, + { + description: "succeeds when there are api keys and pagination", + opts: []store.QueryOption{s.Options().InNamespace("00000000-0000-4000-0000-000000000000"), s.Options().Sort(&query.Sorter{By: "expires_in", Order: query.OrderAsc}), s.Options().Paginate(&query.Paginator{Page: 1, PerPage: 1})}, + fixtures: []string{fixtureAPIKeys}, + expected: Expected{ + apiKeys: []models.APIKey{ + { + ID: "f23a2e56cd3fcfba002c72675c870e1e7813292adc40bbf14cea479a2e07976a", + Name: "dev", + CreatedBy: "507f1f77bcf86cd799439011", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "admin", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + ExpiresIn: 0, + }, + }, + count: 2, + err: nil, + }, + }, + { + description: 
"succeeds when there are api keys and sorter", + opts: []store.QueryOption{s.Options().InNamespace("00000000-0000-4000-0000-000000000000"), s.Options().Sort(&query.Sorter{By: "expires_in", Order: query.OrderDesc}), s.Options().Paginate(&query.Paginator{Page: 1, PerPage: 10})}, + fixtures: []string{fixtureAPIKeys}, + expected: Expected{ + apiKeys: []models.APIKey{ + { + ID: "a1b2c73ea41f70870c035283336d72228118213ed03ec78043ffee48d827af11", + Name: "prod", + CreatedBy: "507f1f77bcf86cd799439011", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "operator", + CreatedAt: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), + ExpiresIn: 10, + }, + { + ID: "f23a2e56cd3fcfba002c72675c870e1e7813292adc40bbf14cea479a2e07976a", + Name: "dev", + CreatedBy: "507f1f77bcf86cd799439011", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "admin", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + ExpiresIn: 0, + }, + }, + count: 2, + err: nil, + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + ctx := context.Background() + + require.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { require.NoError(t, srv.Reset()) }) + + apiKeys, count, err := s.APIKeyList(ctx, tc.opts...) 
+ require.Equal(t, tc.expected, Expected{apiKeys, count, err}) + }) + } +} + +func TestAPIKeyUpdate(t *testing.T) { + type Expected struct { + updatedName string + err error + } + + cases := []struct { + description string + apiKey *models.APIKey + fixtures []string + expected Expected + }{ + { + description: "fails when API key does not exist", + apiKey: &models.APIKey{ + ID: "nonexistent-id", + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "nonexistent", + }, + fixtures: []string{fixtureAPIKeys}, + expected: Expected{ + updatedName: "", + err: store.ErrNoDocuments, + }, + }, + { + description: "succeeds when API key exists", + apiKey: &models.APIKey{ + ID: "f23a2e56cd3fcfba002c72675c870e1e7813292adc40bbf14cea479a2e07976a", + Name: "updated-dev", + CreatedBy: "507f1f77bcf86cd799439011", + TenantID: "00000000-0000-4000-0000-000000000000", + Role: "admin", + ExpiresIn: 0, + }, + fixtures: []string{fixtureAPIKeys}, + expected: Expected{ + updatedName: "updated-dev", + err: nil, + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + ctx := context.Background() + + require.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { require.NoError(t, srv.Reset()) }) + + err := s.APIKeyUpdate(ctx, tc.apiKey) + if tc.expected.err != nil { + require.Equal(t, tc.expected.err, err) + + return + } + + require.NoError(t, err) + + apiKey := new(models.APIKey) + require.NoError(t, db.Collection("api_keys").FindOne(ctx, bson.M{"_id": tc.apiKey.ID}).Decode(apiKey)) + require.Equal(t, tc.expected.updatedName, apiKey.Name) + require.WithinDuration(t, time.Now(), apiKey.UpdatedAt, 1*time.Second) + }) + } +} + +func TestAPIKeyDelete(t *testing.T) { + cases := []struct { + description string + apiKey *models.APIKey + fixtures []string + expected error + }{ + { + description: "fails when API key does not exist", + apiKey: &models.APIKey{ + ID: "nonexistent-id", + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "nonexistent", + 
}, + fixtures: []string{fixtureAPIKeys}, + expected: store.ErrNoDocuments, + }, + { + description: "succeeds when API key exists", + apiKey: &models.APIKey{ + ID: "f23a2e56cd3fcfba002c72675c870e1e7813292adc40bbf14cea479a2e07976a", + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "dev", + }, + fixtures: []string{fixtureAPIKeys}, + expected: nil, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + ctx := context.Background() + + require.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { require.NoError(t, srv.Reset()) }) + + err := s.APIKeyDelete(ctx, tc.apiKey) + require.Equal(t, tc.expected, err) + + if tc.expected == nil { + count, err := db.Collection("api_keys").CountDocuments(ctx, bson.M{"_id": tc.apiKey.ID}) + require.NoError(t, err) + require.Equal(t, int64(0), count) + } + }) + } +} diff --git a/api/store/mongo/device.go b/api/store/mongo/device.go index c6ff94fb71e..33cb5127af1 100644 --- a/api/store/mongo/device.go +++ b/api/store/mongo/device.go @@ -1,32 +1,20 @@ package mongo import ( - "context" - "crypto/md5" - "fmt" + "context" //nolint:gosec "strings" "time" - "github.com/shellhub-io/shellhub/api/pkg/gateway" "github.com/shellhub-io/shellhub/api/store" - "github.com/shellhub-io/shellhub/api/store/mongo/queries" - "github.com/shellhub-io/shellhub/pkg/api/paginator" - "github.com/shellhub-io/shellhub/pkg/clock" "github.com/shellhub-io/shellhub/pkg/models" "github.com/sirupsen/logrus" "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" "go.mongodb.org/mongo-driver/mongo" - "go.mongodb.org/mongo-driver/mongo/options" - "go.mongodb.org/mongo-driver/mongo/writeconcern" ) // DeviceList returns a list of devices based on the given filters, pagination and sorting. 
-func (s *Store) DeviceList(ctx context.Context, pagination paginator.Query, filters []models.Filter, status models.DeviceStatus, sort string, order string, mode store.DeviceListMode) ([]models.Device, int, error) { - queryMatch, err := queries.BuildFilterQuery(filters) - if err != nil { - return nil, 0, FromMongoError(err) - } - +func (s *Store) DeviceList(ctx context.Context, acceptable store.DeviceAcceptable, opts ...store.QueryOption) ([]models.Device, int, error) { query := []bson.M{ { "$match": bson.M{ @@ -36,109 +24,73 @@ func (s *Store) DeviceList(ctx context.Context, pagination paginator.Query, filt }, }, { - "$lookup": bson.M{ - "from": "connected_devices", - "localField": "uid", - "foreignField": "uid", - "as": "online", + "$addFields": bson.M{ + "online": bson.M{ + "$cond": bson.M{ + "if": bson.M{ + "$and": bson.A{ + bson.M{"$eq": bson.A{"$disconnected_at", nil}}, + bson.M{"$gt": bson.A{"$last_seen", primitive.NewDateTimeFromTime(time.Now().Add(-2 * time.Minute))}}, + }, + }, + "then": true, + "else": false, + }, + }, }, }, { - "$addFields": bson.M{ - "online": bson.M{"$anyElementTrue": []interface{}{"$online"}}, + "$lookup": bson.M{ + "from": "tags", + "localField": "tag_ids", + "foreignField": "_id", + "as": "tags", }, }, } - // Only match for the respective tenant if requested - if tenant := gateway.TenantFromContext(ctx); tenant != nil { + for _, opt := range opts { + if err := opt(context.WithValue(ctx, "query", &query)); err != nil { + return nil, 0, err + } + } + + // When the listing mode is [store.DeviceListModeMaxDeviceReached], we should evaluate the `removed_devices` + // collection to check its `accetable` status. 
+ switch acceptable { + case store.DeviceAcceptableFromRemoved: query = append(query, bson.M{ - "$match": bson.M{ - "tenant_id": tenant.ID, + "$addFields": bson.M{ + "acceptable": bson.M{ + "$eq": bson.A{"$status", models.DeviceStatusRemoved}, + }, }, }) - } - - if status != "" { - query = append([]bson.M{{ - "$match": bson.M{ - "status": status, + case store.DeviceAcceptableAsFalse: + query = append(query, bson.M{ + "$addFields": bson.M{ + "acceptable": false, }, - }}, query...) - - // As we have added to device the field called "acceptable" we needed, also, to add the correct value to it. - // The value of "acceptable" is based on the device status and the list mode. If the list status is "accepted" - // we need to add the field "acceptable" with the value "false", because the device is already accepted. - // Otherwise, if the list status is "pending" or "rejected" we evaluate the list mode. When it is - // store.DeviceListModeMaxDeviceReached we need to check if the device is in the removed devices list. - // If it is, the device is only acceptable if it is in the removed devices list. Otherwise, the device is - // unacceptable. - switch status { - case models.DeviceStatusAccepted: - query = append(query, bson.M{ - "$addFields": bson.M{ - "acceptable": false, - }, - }) - case models.DeviceStatusPending, models.DeviceStatusRejected: - switch mode { - case store.DeviceListModeMaxDeviceReached: - query = append(query, []bson.M{ - { - "$lookup": bson.M{ - "from": "removed_devices", - "localField": "uid", - "foreignField": "uid", - "as": "removed", - }, - }, - { - "$addFields": bson.M{ - "acceptable": bson.M{"$anyElementTrue": []interface{}{"$removed"}}, - }, - }, - { - "$unset": "removed", - }, - }...) 
- default: - query = append(query, bson.M{ - "$addFields": bson.M{ - "acceptable": true, + }) + case store.DeviceAcceptableIfNotAccepted: + query = append(query, bson.M{ + "$addFields": bson.M{ + "acceptable": bson.M{ + "$cond": bson.M{ + "if": bson.M{"$ne": bson.A{"$status", models.DeviceStatusAccepted}}, + "then": true, + "else": false, }, - }) - } - } - } - - // Apply filters if any - if len(queryMatch) > 0 { - query = append(query, queryMatch...) + }, + }, + }) } - queryCount := query - queryCount = append(queryCount, bson.M{"$count": "count"}) - count, err := AggregateCount(ctx, s.db.Collection("devices"), queryCount) + count, err := CountAllMatchingDocuments(ctx, s.db.Collection("devices"), query) if err != nil { return nil, 0, FromMongoError(err) } - orderVal := map[string]int{ - "asc": 1, - "desc": -1, - } - - if sort != "" { - query = append(query, bson.M{ - "$sort": bson.M{sort: orderVal[order]}, - }) - } else { - query = append(query, bson.M{ - "$sort": bson.M{"last_seen": -1}, - }) - } - - query = append(query, queries.BuildPaginationQuery(pagination)...) 
query = append(query, []bson.M{ { "$lookup": bson.M{ @@ -179,22 +131,35 @@ func (s *Store) DeviceList(ctx context.Context, pagination paginator.Query, filt return devices, count, FromMongoError(err) } -func (s *Store) DeviceGet(ctx context.Context, uid models.UID) (*models.Device, error) { +func (s *Store) DeviceResolve(ctx context.Context, resolver store.DeviceResolver, value string, opts ...store.QueryOption) (*models.Device, error) { + matchStage := bson.M{} + switch resolver { + case store.DeviceUIDResolver: + matchStage["uid"] = value + case store.DeviceHostnameResolver: + matchStage["name"] = value + case store.DeviceMACResolver: + matchStage["identity"] = bson.M{"mac": value} + } + query := []bson.M{ { - "$match": bson.M{"uid": uid}, - }, - { - "$lookup": bson.M{ - "from": "connected_devices", - "localField": "uid", - "foreignField": "uid", - "as": "online", - }, + "$match": matchStage, }, { "$addFields": bson.M{ - "online": bson.M{"$anyElementTrue": []interface{}{"$online"}}, + "online": bson.M{ + "$cond": bson.M{ + "if": bson.M{ + "$and": bson.A{ + bson.M{"$eq": bson.A{"$disconnected_at", nil}}, + bson.M{"$gt": bson.A{"$last_seen", primitive.NewDateTimeFromTime(time.Now().Add(-2 * time.Minute))}}, + }, + }, + "then": true, + "else": false, + }, + }, }, }, { @@ -213,478 +178,204 @@ func (s *Store) DeviceGet(ctx context.Context, uid models.UID) (*models.Device, { "$unwind": "$namespace", }, - } - - // Only match for the respective tenant if requested - if tenant := gateway.TenantFromContext(ctx); tenant != nil { - query = append(query, bson.M{ - "$match": bson.M{ - "tenant_id": tenant.ID, + { + "$lookup": bson.M{ + "from": "tags", + "localField": "tag_ids", + "foreignField": "_id", + "as": "tags", }, - }) + }, } - device := new(models.Device) + for _, opt := range opts { + if err := opt(context.WithValue(ctx, "query", &query)); err != nil { + return nil, err + } + } cursor, err := s.db.Collection("devices").Aggregate(ctx, query) if err != nil { return nil, 
FromMongoError(err) } defer cursor.Close(ctx) - cursor.Next(ctx) - - err = cursor.Decode(&device) - if err != nil { - return nil, FromMongoError(err) - } - return device, nil -} - -func (s *Store) DeviceDelete(ctx context.Context, uid models.UID) error { - mongoSession, err := s.db.Client().StartSession() - if err != nil { - return FromMongoError(err) - } - defer mongoSession.EndSession(ctx) - - _, err = mongoSession.WithTransaction(ctx, func(mongoctx mongo.SessionContext) (interface{}, error) { - dev, err := s.db.Collection("devices").DeleteOne(ctx, bson.M{"uid": uid}) - if err != nil { - return nil, FromMongoError(err) - } - - if dev.DeletedCount < 1 { - return nil, store.ErrNoDocuments - } - - if err := s.cache.Delete(ctx, strings.Join([]string{"device", string(uid)}, "/")); err != nil { - logrus.Error(err) - } - - if _, err := s.db.Collection("sessions").DeleteMany(ctx, bson.M{"device_uid": uid}); err != nil { - return nil, FromMongoError(err) - } - - if _, err := s.db.Collection("connected_devices").DeleteMany(ctx, bson.M{"uid": uid}); err != nil { - return nil, FromMongoError(err) - } - - return nil, nil - }) - - return err -} - -func (s *Store) DeviceCreate(ctx context.Context, d models.Device, hostname string) error { - if hostname == "" { - hostname = strings.ReplaceAll(d.Identity.MAC, ":", "-") - } - - var dev *models.Device - if err := s.cache.Get(ctx, strings.Join([]string{"device", d.UID}, "/"), &dev); err != nil { - logrus.Error(err) - } - - q := bson.M{ - "$setOnInsert": bson.M{ - "name": hostname, - "status": "pending", - "status_updated_at": time.Now(), - "created_at": clock.Now(), - "tags": []string{}, - }, - "$set": d, - } - opts := options.Update().SetUpsert(true) - _, err := s.db.Collection("devices").UpdateOne(ctx, bson.M{"uid": d.UID}, q, opts) - - return FromMongoError(err) -} - -func (s *Store) DeviceRename(ctx context.Context, uid models.UID, hostname string) error { - dev, err := s.db.Collection("devices").UpdateOne(ctx, bson.M{"uid": 
uid}, bson.M{"$set": bson.M{"name": hostname}}) - if err != nil { - return FromMongoError(err) - } - - if dev.MatchedCount < 1 { - return store.ErrNoDocuments - } - - return nil -} - -func (s *Store) DeviceLookup(ctx context.Context, namespace, hostname string) (*models.Device, error) { - ns := new(models.Namespace) - if err := s.db.Collection("namespaces").FindOne(ctx, bson.M{"name": namespace}).Decode(&ns); err != nil { - return nil, FromMongoError(err) - } + cursor.Next(ctx) device := new(models.Device) - if err := s.db.Collection("devices").FindOne(ctx, bson.M{"tenant_id": ns.TenantID, "name": hostname, "status": "accepted"}).Decode(&device); err != nil { + if err := cursor.Decode(&device); err != nil { return nil, FromMongoError(err) } return device, nil } -func (s *Store) DeviceSetOnline(ctx context.Context, uid models.UID, timestamp time.Time, online bool) error { - if !online { - _, err := s.db.Collection("connected_devices").DeleteMany(ctx, bson.M{"uid": uid}) - - return FromMongoError(err) - } - - collOptions := writeconcern.W1() - updateOptions := options.FindOneAndUpdate().SetUpsert(false).SetReturnDocument(options.Before) - - result := s.db.Collection("devices", options.Collection().SetWriteConcern(collOptions)). 
- FindOneAndUpdate(ctx, bson.M{"uid": uid}, - mongo.Pipeline{ - bson.D{ - bson.E{Key: "$set", Value: bson.M{"last_seen": bson.M{"$cond": bson.A{bson.M{"$lt": bson.A{"$last_seen", timestamp}}, timestamp, "$last_seen"}}}}, - }, - }, updateOptions) - if result.Err() != nil { - return FromMongoError(result.Err()) +func (s *Store) DeviceCreate(ctx context.Context, device *models.Device) (string, error) { + if _, err := s.db.Collection("devices").InsertOne(ctx, device); err != nil { + return "", FromMongoError(err) } - device := new(models.Device) - if err := result.Decode(&device); err != nil { - return FromMongoError(err) - } - - cd := &models.ConnectedDevice{ - UID: device.UID, - TenantID: device.TenantID, - LastSeen: device.LastSeen, - Status: string(device.Status), - } - - updated := cd.LastSeen.Before(timestamp) - if updated { - replaceOptions := options.Replace().SetUpsert(true) - _, err := s.db.Collection("connected_devices", options.Collection().SetWriteConcern(collOptions)). - ReplaceOne(ctx, bson.M{"uid": uid}, &cd, replaceOptions) - if err != nil { - return FromMongoError(err) - } - } - - return nil + return device.UID, nil } -func (s *Store) DeviceUpdateOnline(ctx context.Context, uid models.UID, online bool) error { - dev, err := s.db.Collection("devices").UpdateOne(ctx, bson.M{"uid": uid}, bson.M{"$set": bson.M{"online": online}}) - if err != nil { - return FromMongoError(err) - } - - if dev.MatchedCount < 1 { - return store.ErrNoDocuments - } - - return nil -} - -func (s *Store) DeviceUpdateLastSeen(ctx context.Context, uid models.UID, ts time.Time) error { - dev, err := s.db.Collection("devices").UpdateOne(ctx, bson.M{"uid": uid}, bson.M{"$set": bson.M{"last_seen": ts}}) - if err != nil { - return FromMongoError(err) - } - - if dev.MatchedCount < 1 { - return store.ErrNoDocuments - } - - return nil -} - -// DeviceUpdateStatus updates the status of a specific device in the devices collection -func (s *Store) DeviceUpdateStatus(ctx context.Context, uid 
models.UID, status models.DeviceStatus) error { - updateOptions := options.FindOneAndUpdate().SetReturnDocument(options.After) - result := s.db.Collection("devices", options.Collection()). - FindOneAndUpdate(ctx, bson.M{"uid": uid}, bson.M{"$set": bson.M{"status": status, "status_updated_at": clock.Now()}}, updateOptions) - - if result.Err() != nil { - return FromMongoError(result.Err()) - } - - device := new(models.Device) - if err := result.Decode(&device); err != nil { - return FromMongoError(err) - } - - return nil -} - -func (s *Store) DeviceListByUsage(ctx context.Context, tenant string) ([]models.UID, error) { - query := []bson.M{ +func (s *Store) DeviceConflicts(ctx context.Context, target *models.DeviceConflicts) ([]string, bool, error) { + pipeline := []bson.M{ { "$match": bson.M{ - "tenant_id": tenant, - }, - }, - { - "$group": bson.M{ - "_id": "$device_uid", - "count": bson.M{ - "$sum": 1, + "$or": []bson.M{ + {"name": target.Name}, }, + "status": bson.M{"$ne": models.DeviceStatusRemoved}, }, }, - { - "$sort": bson.M{ - "count": -1, - }, - }, - { - "$limit": 3, - }, } - uids := make([]models.UID, 0) - - cursor, err := s.db.Collection("sessions").Aggregate(ctx, query) + cursor, err := s.db.Collection("devices").Aggregate(ctx, pipeline) if err != nil { - return uids, FromMongoError(err) + return nil, false, FromMongoError(err) } + defer cursor.Close(ctx) + conflicts := make([]string, 0) for cursor.Next(ctx) { - var dev map[string]interface{} - - err = cursor.Decode(&dev) - if err != nil { - return uids, err + device := new(models.DeviceConflicts) + if err := cursor.Decode(&device); err != nil { + return nil, false, FromMongoError(err) } - uids = append(uids, models.UID(dev["_id"].(string))) - } - - return uids, nil -} - -func (s *Store) DeviceGetByMac(ctx context.Context, mac string, tenantID string, status models.DeviceStatus) (*models.Device, error) { - device := new(models.Device) - - switch status { - case "": - if err := 
s.db.Collection("devices").FindOne(ctx, bson.M{"tenant_id": tenantID, "identity": bson.M{"mac": mac}}).Decode(&device); err != nil { - return nil, FromMongoError(err) - } - default: - if err := s.db.Collection("devices").FindOne(ctx, bson.M{"tenant_id": tenantID, "status": status, "identity": bson.M{"mac": mac}}).Decode(&device); err != nil { - return nil, FromMongoError(err) + if device.Name == target.Name { + conflicts = append(conflicts, "name") } } - return device, nil -} - -func (s *Store) DeviceGetByName(ctx context.Context, name string, tenantID string, status models.DeviceStatus) (*models.Device, error) { - device := new(models.Device) - - if err := s.db.Collection("devices").FindOne(ctx, bson.M{"tenant_id": tenantID, "name": name, "status": string(status)}).Decode(&device); err != nil { - return nil, FromMongoError(err) - } - - return device, nil + return conflicts, len(conflicts) > 0, nil } -func (s *Store) DeviceGetByUID(ctx context.Context, uid models.UID, tenantID string) (*models.Device, error) { - var device *models.Device - if err := s.cache.Get(ctx, strings.Join([]string{"device", string(uid)}, "/"), &device); err != nil { - logrus.Error(err) - } - - if device != nil { - return device, nil +func (s *Store) DeviceUpdate(ctx context.Context, device *models.Device) error { + bsonBytes, err := bson.Marshal(device) + if err != nil { + return FromMongoError(err) } - if err := s.db.Collection("devices").FindOne(ctx, bson.M{"tenant_id": tenantID, "uid": uid}).Decode(&device); err != nil { - return nil, FromMongoError(err) + doc := make(bson.M) + if err := bson.Unmarshal(bsonBytes, &doc); err != nil { + return FromMongoError(err) } - if err := s.cache.Set(ctx, strings.Join([]string{"device", string(uid)}, "/"), device, time.Minute); err != nil { - logrus.Error(err) + // Convert string TagIDs to MongoDB ObjectIDs for referential integrity + delete(doc, "tags") + if tagIDs, ok := doc["tag_ids"].(bson.A); ok && len(tagIDs) > 0 { + for i, id := range tagIDs { + 
if idStr, ok := id.(string); ok { + objID, _ := primitive.ObjectIDFromHex(idStr) + tagIDs[i] = objID + } + } } - return device, nil -} - -func (s *Store) DeviceSetPosition(ctx context.Context, uid models.UID, position models.DevicePosition) error { - dev, err := s.db.Collection("devices").UpdateOne(ctx, bson.M{"uid": uid}, bson.M{"$set": bson.M{"position": position}}) + filter := bson.M{"uid": device.UID, "tenant_id": device.TenantID} + r, err := s.db.Collection("devices").UpdateOne(ctx, filter, bson.M{"$set": doc}) if err != nil { return FromMongoError(err) } - if dev.MatchedCount < 1 { + if r.MatchedCount < 1 { return store.ErrNoDocuments } - return nil -} - -func (s *Store) DeviceChooser(ctx context.Context, tenantID string, chosen []string) error { - filter := bson.M{ - "status": "accepted", - "tenant_id": tenantID, - "uid": bson.M{ - "$nin": chosen, - }, - } - - update := bson.M{ - "$set": bson.M{ - "status": "pending", - }, - } - - _, err := s.db.Collection("devices").UpdateMany(ctx, filter, update) - if err != nil { - return err + if err := s.cache.Delete(ctx, "device"+"/"+device.UID); err != nil { + logrus.WithError(err).WithField("uid", device.UID).Error("cannot delete device from cache") } return nil } -// DeviceChooser updates devices with "accepted" status to "pending" for a given tenantID, -// excluding devices with UIDs present in the "notIn" list. 
-func (s *Store) DeviceUpdate(ctx context.Context, tenant string, uid models.UID, name *string, publicURL *bool) error { - session, err := s.db.Client().StartSession() +func (s *Store) DeviceHeartbeat(ctx context.Context, uids []string, lastSeen time.Time) (int64, error) { + filter := bson.M{"uid": bson.M{"$in": uids}} + update := bson.M{"$set": bson.M{"last_seen": lastSeen, "disconnected_at": nil}} + r, err := s.db.Collection("devices").UpdateMany(ctx, filter, update) if err != nil { - return err + return 0, FromMongoError(err) } - defer session.EndSession(ctx) - - err = mongo.WithSession(ctx, session, func(sessionContext mongo.SessionContext) error { - if name != nil { - if _, err := s.db.Collection("devices").UpdateOne(sessionContext, bson.M{"tenant_id": tenant, "uid": uid}, bson.M{"$set": bson.M{"name": *name}}); err != nil { - return err - } + for _, uid := range uids { + if err := s.cache.Delete(ctx, "device"+"/"+uid); err != nil { + logrus.WithError(err).WithField("uid", uid).Error("cannot delete device from cache") } - - if publicURL != nil { - if _, err := s.db.Collection("devices").UpdateOne(sessionContext, bson.M{"tenant_id": tenant, "uid": uid}, bson.M{"$set": bson.M{"public_url": *publicURL}}); err != nil { - return err - } - } - - return nil - }) - - return FromMongoError(err) -} - -func (s *Store) DeviceRemovedCount(ctx context.Context, tenant string) (int64, error) { - count, err := s.db.Collection("removed_devices").CountDocuments(ctx, bson.M{"device.tenant_id": tenant}) - if err != nil { - return 0, FromMongoError(err) } - return count, nil + return r.ModifiedCount, nil } -func (s *Store) DeviceRemovedGet(ctx context.Context, tenant string, uid models.UID) (*models.DeviceRemoved, error) { - var slot models.DeviceRemoved - err := s.db.Collection("removed_devices").FindOne(ctx, bson.M{"device.tenant_id": tenant, "device.uid": uid}).Decode(&slot) - if err != nil { - return nil, FromMongoError(err) +func (s *Store) DeviceDelete(ctx context.Context, 
device *models.Device) error { + deletedCount, err := s.DeviceDeleteMany(ctx, []string{device.UID}) + switch { + case err != nil: + return err + case deletedCount < 1: + return store.ErrNoDocuments + default: + return nil } - - return &slot, nil } -func (s *Store) DeviceRemovedInsert(ctx context.Context, tenant string, device *models.Device) error { //nolint:revive - now := time.Now() +func (s *Store) DeviceDeleteMany(ctx context.Context, uids []string) (int64, error) { + fn := s.deviceDeleteManyFn(uids) - device.Status = models.DeviceStatusRemoved - device.StatusUpdatedAt = now + // Check if already inside a MongoDB transaction to avoid nested transactions. + // Nested transactions cause WriteConflict errors. + if mctx, ok := ctx.(mongo.SessionContext); ok { + logrus.Debug("reusing existing MongoDB session from context") - _, err := s.db.Collection("removed_devices").InsertOne(ctx, models.DeviceRemoved{ - Timestamp: now, - Device: device, - }) - if err != nil { - return FromMongoError(err) - } - - return nil -} - -func (s *Store) DeviceRemovedDelete(ctx context.Context, tenant string, uid models.UID) error { - _, err := s.db.Collection("removed_devices").DeleteOne(ctx, bson.M{"device.tenant_id": tenant, "device.uid": uid}) - if err != nil { - return FromMongoError(err) - } - - return nil -} + deletedCount, err := fn(mctx) + if _, ok := deletedCount.(int64); !ok || err != nil { + logrus.WithError(err).WithField("uids", uids).Error("device deletion failed in existing session") -func (s *Store) DeviceRemovedList(ctx context.Context, tenant string, pagination paginator.Query, filters []models.Filter, sort string, order string) ([]models.DeviceRemoved, int, error) { - pipeline := []bson.M{ - { - "$match": bson.M{ - "device.tenant_id": tenant, - }, - }, - } + return 0, err + } - pipeline = append(pipeline, queries.BuildPaginationQuery(pagination)...) 
+ return deletedCount.(int64), nil + } else { // nolint:revive + logrus.WithField("uids", uids).Debug("creating new MongoDB session") - if filters != nil { - queryFilter, err := queries.BuildFilterQuery(filters) + mongoSession, err := s.db.Client().StartSession() if err != nil { - return nil, 0, FromMongoError(err) + return 0, FromMongoError(err) } + defer mongoSession.EndSession(ctx) - pipeline = append(pipeline, queryFilter...) - } - - orderVal := map[string]int{ - "asc": 1, - "desc": -1, - } + deletedCount, err := mongoSession.WithTransaction(ctx, fn) + if _, ok := deletedCount.(int64); !ok || err != nil { + logrus.WithError(err).Error("device deletion transaction failed") - if sort != "" && order != "" { - pipeline = append(pipeline, bson.M{ - "$sort": bson.M{sort: orderVal[order]}, - }) - } else { - pipeline = append(pipeline, bson.M{ - "$sort": bson.M{"timestamp": -1}, - }) - } + return 0, err + } - aggregation, err := s.db.Collection("removed_devices").Aggregate(ctx, pipeline) - if err != nil { - return nil, 0, FromMongoError(err) + return deletedCount.(int64), nil } +} - var devices []models.DeviceRemoved - if err := aggregation.All(ctx, &devices); err != nil { - return nil, 0, FromMongoError(err) - } +func (s *Store) deviceDeleteManyFn(uids []string) func(ctx mongo.SessionContext) (any, error) { + return func(mctx mongo.SessionContext) (any, error) { + r, err := s.db.Collection("devices").DeleteMany(mctx, bson.M{"uid": bson.M{"$in": uids}}) + if err != nil { + return int64(0), FromMongoError(err) + } - return devices, len(devices), nil -} + if _, err := s.db.Collection("sessions").DeleteMany(mctx, bson.M{"device_uid": bson.M{"$in": uids}}); err != nil { + return int64(0), FromMongoError(err) + } -func (s *Store) DeviceCreatePublicURLAddress(ctx context.Context, uid models.UID) error { - _, err := s.db.Collection("devices").UpdateOne(ctx, bson.M{"uid": uid}, bson.M{"$set": bson.M{"public_url_address": fmt.Sprintf("%x", md5.Sum([]byte(uid)))}}) - if err != 
nil { - return FromMongoError(err) - } + if _, err := s.db.Collection("tunnels").DeleteMany(mctx, bson.M{"device": bson.M{"$in": uids}}); err != nil { + return int64(0), FromMongoError(err) + } - return nil -} + for _, uid := range uids { + if err := s.cache.Delete(mctx, strings.Join([]string{"device", uid}, "/")); err != nil { + logrus.WithError(err).WithField("uid", uid).Warn("deviceDeleteManyFn: cannot delete device from cache") + } + } -func (s *Store) DeviceGetByPublicURLAddress(ctx context.Context, address string) (*models.Device, error) { - device := new(models.Device) - if err := s.db.Collection("devices").FindOne(ctx, bson.M{"public_url_address": address}).Decode(&device); err != nil { - return nil, FromMongoError(err) + return r.DeletedCount, nil } - - return device, nil } diff --git a/api/store/mongo/device_tags.go b/api/store/mongo/device_tags.go deleted file mode 100644 index 83ad2763c2e..00000000000 --- a/api/store/mongo/device_tags.go +++ /dev/null @@ -1,64 +0,0 @@ -package mongo - -import ( - "context" - - "github.com/shellhub-io/shellhub/api/store" - "github.com/shellhub-io/shellhub/pkg/models" - "go.mongodb.org/mongo-driver/bson" -) - -func (s *Store) DevicePushTag(ctx context.Context, uid models.UID, tag string) error { - t, err := s.db.Collection("devices").UpdateOne(ctx, bson.M{"uid": uid}, bson.M{"$push": bson.M{"tags": tag}}) - if err != nil { - return FromMongoError(err) - } - - if t.ModifiedCount < 1 { - return store.ErrNoDocuments - } - - return nil -} - -func (s *Store) DevicePullTag(ctx context.Context, uid models.UID, tag string) error { - t, err := s.db.Collection("devices").UpdateOne(ctx, bson.M{"uid": uid}, bson.M{"$pull": bson.M{"tags": tag}}) - if err != nil { - return FromMongoError(err) - } - - if t.ModifiedCount < 1 { - return store.ErrNoDocuments - } - - return nil -} - -func (s *Store) DeviceSetTags(ctx context.Context, uid models.UID, tags []string) (int64, int64, error) { - tag, err := 
s.db.Collection("devices").UpdateOne(ctx, bson.M{"uid": uid}, bson.M{"$set": bson.M{"tags": tags}}) - - return tag.MatchedCount, tag.ModifiedCount, FromMongoError(err) -} - -func (s *Store) DeviceBulkRenameTag(ctx context.Context, tenant, currentTag, newTag string) (int64, error) { - res, err := s.db.Collection("devices").UpdateMany(ctx, bson.M{"tenant_id": tenant, "tags": currentTag}, bson.M{"$set": bson.M{"tags.$": newTag}}) - - return res.ModifiedCount, FromMongoError(err) -} - -func (s *Store) DeviceBulkDeleteTag(ctx context.Context, tenant, tag string) (int64, error) { - res, err := s.db.Collection("devices").UpdateMany(ctx, bson.M{"tenant_id": tenant}, bson.M{"$pull": bson.M{"tags": tag}}) - - return res.ModifiedCount, FromMongoError(err) -} - -func (s *Store) DeviceGetTags(ctx context.Context, tenant string) ([]string, int, error) { - list, err := s.db.Collection("devices").Distinct(ctx, "tags", bson.M{"tenant_id": tenant}) - - tags := make([]string, len(list)) - for i, item := range list { - tags[i] = item.(string) //nolint:forcetypeassert - } - - return tags, len(tags), FromMongoError(err) -} diff --git a/api/store/mongo/device_tags_test.go b/api/store/mongo/device_tags_test.go deleted file mode 100644 index 992af9f9d3f..00000000000 --- a/api/store/mongo/device_tags_test.go +++ /dev/null @@ -1,337 +0,0 @@ -package mongo - -import ( - "context" - "testing" - - "github.com/shellhub-io/shellhub/api/pkg/dbtest" - "github.com/shellhub-io/shellhub/api/pkg/fixtures" - "github.com/shellhub-io/shellhub/api/store" - "github.com/shellhub-io/shellhub/pkg/cache" - "github.com/shellhub-io/shellhub/pkg/models" - "github.com/stretchr/testify/assert" -) - -func TestDevicePushTag(t *testing.T) { - cases := []struct { - description string - uid models.UID - tag string - fixtures []string - expected error - }{ - { - description: "fails when device doesn't exist", - uid: models.UID("nonexistent"), - tag: "tag4", - fixtures: []string{fixtures.FixtureDevices}, - expected: 
store.ErrNoDocuments, - }, - { - description: "successfully creates single tag for an existing device", - uid: models.UID("2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c"), - tag: "tag4", - fixtures: []string{fixtures.FixtureDevices}, - expected: nil, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := mongostore.DevicePushTag(context.TODO(), tc.uid, tc.tag) - assert.Equal(t, tc.expected, err) - }) - } -} - -func TestDevicePullTag(t *testing.T) { - cases := []struct { - description string - uid models.UID - tag string - fixtures []string - expected error - }{ - { - description: "fails when device doesn't exist", - uid: models.UID("nonexistent"), - tag: "tag-1", - fixtures: []string{fixtures.FixtureDevices}, - expected: store.ErrNoDocuments, - }, - { - description: "fails when device's tag doesn't exist", - uid: models.UID("2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c"), - tag: "nonexistent", - fixtures: []string{fixtures.FixtureDevices}, - expected: store.ErrNoDocuments, - }, - { - description: "successfully remove a single tag for an existing device", - uid: models.UID("2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c"), - tag: "tag-1", - fixtures: []string{fixtures.FixtureDevices}, - expected: nil, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := mongostore.DevicePullTag(context.TODO(), tc.uid, tc.tag) - 
assert.Equal(t, tc.expected, err) - }) - } -} - -func TestDeviceSetTags(t *testing.T) { - type Expected struct { - matchedCount int64 - updatedCount int64 - err error - } - cases := []struct { - description string - uid models.UID - tags []string - fixtures []string - expected Expected - }{ - { - description: "successfully when device doesn't exist", - uid: models.UID("nonexistent"), - tags: []string{"new-tag"}, - fixtures: []string{fixtures.FixtureDevices}, - expected: Expected{ - matchedCount: 0, - updatedCount: 0, - err: nil, - }, - }, - { - description: "successfully when tags are equal than current device's tags", - uid: models.UID("2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c"), - tags: []string{"tag-1"}, - fixtures: []string{fixtures.FixtureDevices}, - expected: Expected{ - matchedCount: 1, - updatedCount: 0, - err: nil, - }, - }, - { - description: "successfully update tags for an existing device", - uid: models.UID("2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c"), - tags: []string{"new-tag"}, - fixtures: []string{fixtures.FixtureDevices}, - expected: Expected{ - matchedCount: 1, - updatedCount: 1, - err: nil, - }, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - matchedCount, updatedCount, err := mongostore.DeviceSetTags(context.TODO(), tc.uid, tc.tags) - assert.Equal(t, tc.expected, Expected{matchedCount, updatedCount, err}) - }) - } -} - -func TestDeviceBulkRenameTag(t *testing.T) { - type Expected struct { - count int64 - err error - } - - cases := []struct { - description string - tenant string - oldTag string - newTag string - fixtures []string - expected Expected - }{ - { - description: "fails when tenant doesn't 
exist", - tenant: "nonexistent", - oldTag: "tag-1", - newTag: "newtag", - fixtures: []string{fixtures.FixtureDevices}, - expected: Expected{ - count: 0, - err: nil, - }, - }, - { - description: "fails when device's tag doesn't exist", - tenant: "00000000-0000-4000-0000-000000000000", - oldTag: "nonexistent", - newTag: "newtag", - fixtures: []string{fixtures.FixtureDevices}, - expected: Expected{ - count: 0, - err: nil, - }, - }, - { - description: "successfully rename tag for an existing device", - tenant: "00000000-0000-4000-0000-000000000000", - oldTag: "tag-1", - newTag: "newtag", - fixtures: []string{fixtures.FixtureDevices}, - expected: Expected{ - count: 2, - err: nil, - }, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - count, err := mongostore.DeviceBulkRenameTag(context.TODO(), tc.tenant, tc.oldTag, tc.newTag) - assert.Equal(t, tc.expected, Expected{count, err}) - }) - } -} - -func TestDeviceBulkDeleteTag(t *testing.T) { - type Expected struct { - count int64 - err error - } - - cases := []struct { - description string - tenant string - tag string - fixtures []string - expected Expected - }{ - { - description: "fails when tenant doesn't exist", - tenant: "nonexistent", - tag: "tag-1", - fixtures: []string{fixtures.FixtureDevices}, - expected: Expected{ - count: 0, - err: nil, - }, - }, - { - description: "fails when device's tag doesn't exist", - tenant: "00000000-0000-4000-0000-000000000000", - tag: "nonexistent", - fixtures: []string{fixtures.FixtureDevices}, - expected: Expected{ - count: 0, - err: nil, - }, - }, - { - description: "successfully delete single tag for an existing device", - tenant: "00000000-0000-4000-0000-000000000000", - tag: "tag-1", - 
fixtures: []string{fixtures.FixtureDevices}, - expected: Expected{ - count: 2, - err: nil, - }, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - count, err := mongostore.DeviceBulkDeleteTag(context.TODO(), tc.tenant, tc.tag) - assert.Equal(t, tc.expected, Expected{count, err}) - }) - } -} - -func TestDeviceGetTags(t *testing.T) { - type Expected struct { - tags []string - len int - err error - } - - cases := []struct { - description string - tenant string - fixtures []string - expected Expected - }{ - { - description: "succeeds when tags list is greater than 1", - tenant: "00000000-0000-4000-0000-000000000000", - fixtures: []string{fixtures.FixtureDevices}, - expected: Expected{ - tags: []string{"tag-1"}, - len: 1, - err: nil, - }, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - tags, count, err := mongostore.DeviceGetTags(context.TODO(), tc.tenant) - assert.Equal(t, tc.expected, Expected{tags: tags, len: count, err: err}) - }) - } -} diff --git a/api/store/mongo/device_test.go b/api/store/mongo/device_test.go index 2a456120a94..feaeba9c549 100644 --- a/api/store/mongo/device_test.go +++ b/api/store/mongo/device_test.go @@ -1,18 +1,17 @@ -package mongo +package mongo_test import ( "context" "testing" "time" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" - "github.com/shellhub-io/shellhub/api/pkg/fixtures" "github.com/shellhub-io/shellhub/api/store" - 
"github.com/shellhub-io/shellhub/pkg/api/paginator" - "github.com/shellhub-io/shellhub/pkg/cache" + "github.com/shellhub-io/shellhub/pkg/api/query" "github.com/shellhub-io/shellhub/pkg/clock" "github.com/shellhub-io/shellhub/pkg/models" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "go.mongodb.org/mongo-driver/bson" ) func TestDeviceList(t *testing.T) { @@ -23,22 +22,18 @@ func TestDeviceList(t *testing.T) { } cases := []struct { description string - paginator paginator.Query - filters []models.Filter - status models.DeviceStatus - sort string - order string + opts []store.QueryOption fixtures []string expected Expected }{ { description: "succeeds when no devices are found", - filters: nil, - paginator: paginator.Query{Page: -1, PerPage: -1}, - status: models.DeviceStatus(""), - sort: "last_seen", - order: "asc", - fixtures: []string{}, + opts: []store.QueryOption{ + s.Options().Match(&query.Filters{}), + s.Options().Sort(&query.Sorter{By: "last_seen", Order: query.OrderAsc}), + s.Options().Paginate(&query.Paginator{Page: -1, PerPage: -1}), + }, + fixtures: []string{}, expected: Expected{ dev: []models.Device{}, len: 0, @@ -47,93 +42,120 @@ func TestDeviceList(t *testing.T) { }, { description: "succeeds when devices are found", - filters: nil, - paginator: paginator.Query{Page: -1, PerPage: -1}, - status: models.DeviceStatus(""), - sort: "last_seen", - order: "asc", - fixtures: []string{fixtures.FixtureNamespaces, fixtures.FixtureDevices, fixtures.FixtureConnectedDevices}, + opts: []store.QueryOption{ + s.Options().Match(&query.Filters{}), + s.Options().Sort(&query.Sorter{By: "last_seen", Order: query.OrderAsc}), + s.Options().Paginate(&query.Paginator{Page: -1, PerPage: -1}), + }, + fixtures: []string{fixtureNamespaces, fixtureTags, fixtureDevices}, expected: Expected{ dev: []models.Device{ { - CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - LastSeen: 
time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - UID: "5300530e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809f", - Name: "device-1", - Identity: &models.DeviceIdentity{MAC: "mac-1"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: false, - Namespace: "namespace-1", - Status: "accepted", - RemoteAddr: "", - Position: nil, - Tags: []string{"tag-1"}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + StatusUpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + LastSeen: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UID: "5300530e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809f", + Name: "device-1", + Identity: &models.DeviceIdentity{MAC: "mac-1"}, + Info: nil, + PublicKey: "", + TenantID: "00000000-0000-4000-0000-000000000000", + Online: false, + Namespace: "namespace-1", + Status: "accepted", + RemoteAddr: "", + Position: nil, + Acceptable: false, + Taggable: models.Taggable{ + TagIDs: []string{"6791d3ae04ba86e6d7a0514d"}, + Tags: []models.Tag{ + { + ID: "6791d3ae04ba86e6d7a0514d", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "production", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + }, + }, }, { - CreatedAt: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), - UID: "4300430e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809e", - Name: "device-2", - Identity: &models.DeviceIdentity{MAC: "mac-2"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: false, - Namespace: "namespace-1", - Status: "accepted", - RemoteAddr: "", - Position: nil, - Tags: []string{}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, + CreatedAt: time.Date(2023, 1, 2, 12, 0, 0, 0, 
time.UTC), + StatusUpdatedAt: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), + LastSeen: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), + UID: "4300430e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809e", + Name: "device-2", + Identity: &models.DeviceIdentity{MAC: "mac-2"}, + Info: nil, + PublicKey: "", + TenantID: "00000000-0000-4000-0000-000000000000", + Online: false, + Namespace: "namespace-1", + Status: "accepted", + RemoteAddr: "", + Position: nil, + Acceptable: false, + Taggable: models.Taggable{ + TagIDs: []string{}, + Tags: []models.Tag{}, + }, }, { - CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", - Name: "device-3", - Identity: &models.DeviceIdentity{MAC: "mac-3"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: true, - Namespace: "namespace-1", - Status: "accepted", - RemoteAddr: "", - Position: nil, - Tags: []string{"tag-1"}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, + CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + StatusUpdatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + LastSeen: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", + Name: "device-3", + Identity: &models.DeviceIdentity{MAC: "mac-3"}, + Info: nil, + PublicKey: "", + TenantID: "00000000-0000-4000-0000-000000000000", + Online: false, + Namespace: "namespace-1", + Status: "accepted", + RemoteAddr: "", + Position: nil, + Acceptable: false, + Taggable: models.Taggable{ + TagIDs: []string{"6791d3ae04ba86e6d7a0514d", "6791d3be5a201d874c4c2885"}, + Tags: []models.Tag{ + { + ID: "6791d3ae04ba86e6d7a0514d", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: 
"production", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + { + ID: "6791d3be5a201d874c4c2885", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "development", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + }, + }, }, { - CreatedAt: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), - UID: "3300330e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809d", - Name: "device-4", - Identity: &models.DeviceIdentity{MAC: "mac-4"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: false, - Namespace: "namespace-1", - Status: "pending", - RemoteAddr: "", - Position: nil, - Tags: []string{}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, + CreatedAt: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), + StatusUpdatedAt: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), + LastSeen: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), + UID: "3300330e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809d", + Name: "device-4", + Identity: &models.DeviceIdentity{MAC: "mac-4"}, + Info: nil, + PublicKey: "", + TenantID: "00000000-0000-4000-0000-000000000000", + Online: false, + Namespace: "namespace-1", + Status: "pending", + RemoteAddr: "", + Position: nil, + Acceptable: true, + Taggable: models.Taggable{ + TagIDs: []string{}, + Tags: []models.Tag{}, + }, }, }, len: 4, @@ -142,148 +164,70 @@ func TestDeviceList(t *testing.T) { }, { description: "succeeds when devices are found with limited page and page size", - filters: nil, - paginator: paginator.Query{Page: 2, PerPage: 2}, - status: models.DeviceStatus(""), - sort: "last_seen", - order: "asc", - fixtures: []string{fixtures.FixtureNamespaces, fixtures.FixtureDevices, fixtures.FixtureConnectedDevices}, - expected: Expected{ - dev: []models.Device{ - { - CreatedAt: 
time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", - Name: "device-3", - Identity: &models.DeviceIdentity{MAC: "mac-3"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: true, - Namespace: "namespace-1", - Status: "accepted", - RemoteAddr: "", - Position: nil, - Tags: []string{"tag-1"}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, - }, - { - CreatedAt: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), - UID: "3300330e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809d", - Name: "device-4", - Identity: &models.DeviceIdentity{MAC: "mac-4"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: false, - Namespace: "namespace-1", - Status: "pending", - RemoteAddr: "", - Position: nil, - Tags: []string{}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, - }, - }, - len: 4, - err: nil, + opts: []store.QueryOption{ + s.Options().Match(&query.Filters{}), + s.Options().Sort(&query.Sorter{By: "last_seen", Order: query.OrderAsc}), + s.Options().Paginate(&query.Paginator{Page: 2, PerPage: 2}), }, - }, - { - description: "succeeds when devices are found with sort created_at", - filters: nil, - paginator: paginator.Query{Page: -1, PerPage: -1}, - status: models.DeviceStatus(""), - sort: "created_at", - order: "asc", - fixtures: []string{fixtures.FixtureNamespaces, fixtures.FixtureDevices, fixtures.FixtureConnectedDevices}, + fixtures: []string{fixtureNamespaces, fixtureTags, fixtureDevices}, expected: Expected{ dev: []models.Device{ { - CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, 
time.UTC), - LastSeen: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - UID: "5300530e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809f", - Name: "device-1", - Identity: &models.DeviceIdentity{MAC: "mac-1"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: false, - Namespace: "namespace-1", - Status: "accepted", - RemoteAddr: "", - Position: nil, - Tags: []string{"tag-1"}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, - }, - { - CreatedAt: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), - UID: "4300430e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809e", - Name: "device-2", - Identity: &models.DeviceIdentity{MAC: "mac-2"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: false, - Namespace: "namespace-1", - Status: "accepted", - RemoteAddr: "", - Position: nil, - Tags: []string{}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, - }, - { - CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", - Name: "device-3", - Identity: &models.DeviceIdentity{MAC: "mac-3"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: true, - Namespace: "namespace-1", - Status: "accepted", - RemoteAddr: "", - Position: nil, - Tags: []string{"tag-1"}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, + CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + StatusUpdatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + LastSeen: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", + Name: "device-3", + Identity: 
&models.DeviceIdentity{MAC: "mac-3"}, + Info: nil, + PublicKey: "", + TenantID: "00000000-0000-4000-0000-000000000000", + Online: false, + Namespace: "namespace-1", + Status: "accepted", + RemoteAddr: "", + Position: nil, + Acceptable: false, + Taggable: models.Taggable{ + TagIDs: []string{"6791d3ae04ba86e6d7a0514d", "6791d3be5a201d874c4c2885"}, + Tags: []models.Tag{ + { + ID: "6791d3ae04ba86e6d7a0514d", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "production", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + { + ID: "6791d3be5a201d874c4c2885", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "development", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + }, + }, }, { - CreatedAt: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), - UID: "3300330e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809d", - Name: "device-4", - Identity: &models.DeviceIdentity{MAC: "mac-4"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: false, - Namespace: "namespace-1", - Status: "pending", - RemoteAddr: "", - Position: nil, - Tags: []string{}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, + CreatedAt: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), + StatusUpdatedAt: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), + LastSeen: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), + UID: "3300330e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809d", + Name: "device-4", + Identity: &models.DeviceIdentity{MAC: "mac-4"}, + Info: nil, + PublicKey: "", + TenantID: "00000000-0000-4000-0000-000000000000", + Online: false, + Namespace: "namespace-1", + Status: "pending", + RemoteAddr: "", + Position: nil, + Acceptable: true, + Taggable: 
models.Taggable{ + TagIDs: []string{}, + Tags: []models.Tag{}, + }, }, }, len: 4, @@ -292,93 +236,120 @@ func TestDeviceList(t *testing.T) { }, { description: "succeeds when devices are found with order asc", - filters: nil, - paginator: paginator.Query{Page: -1, PerPage: -1}, - status: models.DeviceStatus(""), - sort: "last_seen", - order: "asc", - fixtures: []string{fixtures.FixtureNamespaces, fixtures.FixtureDevices, fixtures.FixtureConnectedDevices}, + opts: []store.QueryOption{ + s.Options().Match(&query.Filters{}), + s.Options().Sort(&query.Sorter{By: "last_seen", Order: query.OrderAsc}), + s.Options().Paginate(&query.Paginator{Page: -1, PerPage: -1}), + }, + fixtures: []string{fixtureNamespaces, fixtureTags, fixtureDevices}, expected: Expected{ dev: []models.Device{ { - CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - UID: "5300530e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809f", - Name: "device-1", - Identity: &models.DeviceIdentity{MAC: "mac-1"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: false, - Namespace: "namespace-1", - Status: "accepted", - RemoteAddr: "", - Position: nil, - Tags: []string{"tag-1"}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + StatusUpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + LastSeen: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UID: "5300530e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809f", + Name: "device-1", + Identity: &models.DeviceIdentity{MAC: "mac-1"}, + Info: nil, + PublicKey: "", + TenantID: "00000000-0000-4000-0000-000000000000", + Online: false, + Namespace: "namespace-1", + Status: "accepted", + RemoteAddr: "", + Position: nil, + Acceptable: false, + Taggable: models.Taggable{ + TagIDs: []string{"6791d3ae04ba86e6d7a0514d"}, + 
Tags: []models.Tag{ + { + ID: "6791d3ae04ba86e6d7a0514d", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "production", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + }, + }, }, { - CreatedAt: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), - UID: "4300430e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809e", - Name: "device-2", - Identity: &models.DeviceIdentity{MAC: "mac-2"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: false, - Namespace: "namespace-1", - Status: "accepted", - RemoteAddr: "", - Position: nil, - Tags: []string{}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, + CreatedAt: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), + StatusUpdatedAt: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), + LastSeen: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), + UID: "4300430e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809e", + Name: "device-2", + Identity: &models.DeviceIdentity{MAC: "mac-2"}, + Info: nil, + PublicKey: "", + TenantID: "00000000-0000-4000-0000-000000000000", + Online: false, + Namespace: "namespace-1", + Status: "accepted", + RemoteAddr: "", + Position: nil, + Acceptable: false, + Taggable: models.Taggable{ + TagIDs: []string{}, + Tags: []models.Tag{}, + }, }, { - CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", - Name: "device-3", - Identity: &models.DeviceIdentity{MAC: "mac-3"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: true, - Namespace: "namespace-1", - Status: "accepted", - RemoteAddr: "", - Position: nil, - 
Tags: []string{"tag-1"}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, + CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + StatusUpdatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + LastSeen: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", + Name: "device-3", + Identity: &models.DeviceIdentity{MAC: "mac-3"}, + Info: nil, + PublicKey: "", + TenantID: "00000000-0000-4000-0000-000000000000", + Online: false, + Namespace: "namespace-1", + Status: "accepted", + RemoteAddr: "", + Position: nil, + Acceptable: false, + Taggable: models.Taggable{ + TagIDs: []string{"6791d3ae04ba86e6d7a0514d", "6791d3be5a201d874c4c2885"}, + Tags: []models.Tag{ + { + ID: "6791d3ae04ba86e6d7a0514d", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "production", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + { + ID: "6791d3be5a201d874c4c2885", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "development", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + }, + }, }, { - CreatedAt: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), - UID: "3300330e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809d", - Name: "device-4", - Identity: &models.DeviceIdentity{MAC: "mac-4"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: false, - Namespace: "namespace-1", - Status: "pending", - RemoteAddr: "", - Position: nil, - Tags: []string{}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, + CreatedAt: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), + StatusUpdatedAt: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), + LastSeen: time.Date(2023, 1, 4, 12, 0, 0, 
0, time.UTC), + UID: "3300330e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809d", + Name: "device-4", + Identity: &models.DeviceIdentity{MAC: "mac-4"}, + Info: nil, + PublicKey: "", + TenantID: "00000000-0000-4000-0000-000000000000", + Online: false, + Namespace: "namespace-1", + Status: "pending", + RemoteAddr: "", + Position: nil, + Acceptable: true, + Taggable: models.Taggable{ + TagIDs: []string{}, + Tags: []models.Tag{}, + }, }, }, len: 4, @@ -387,93 +358,120 @@ func TestDeviceList(t *testing.T) { }, { description: "succeeds when devices are found with order desc", - filters: nil, - paginator: paginator.Query{Page: -1, PerPage: -1}, - status: models.DeviceStatus(""), - sort: "last_seen", - order: "desc", - fixtures: []string{fixtures.FixtureNamespaces, fixtures.FixtureDevices, fixtures.FixtureConnectedDevices}, + opts: []store.QueryOption{ + s.Options().Match(&query.Filters{}), + s.Options().Sort(&query.Sorter{By: "last_seen", Order: query.OrderDesc}), + s.Options().Paginate(&query.Paginator{Page: -1, PerPage: -1}), + }, + fixtures: []string{fixtureNamespaces, fixtureTags, fixtureDevices}, expected: Expected{ dev: []models.Device{ { - CreatedAt: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), - UID: "3300330e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809d", - Name: "device-4", - Identity: &models.DeviceIdentity{MAC: "mac-4"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: false, - Namespace: "namespace-1", - Status: "pending", - RemoteAddr: "", - Position: nil, - Tags: []string{}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, + CreatedAt: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), + StatusUpdatedAt: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), + LastSeen: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), + UID: 
"3300330e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809d", + Name: "device-4", + Identity: &models.DeviceIdentity{MAC: "mac-4"}, + Info: nil, + PublicKey: "", + TenantID: "00000000-0000-4000-0000-000000000000", + Online: false, + Namespace: "namespace-1", + Status: "pending", + RemoteAddr: "", + Position: nil, + Acceptable: true, + Taggable: models.Taggable{ + TagIDs: []string{}, + Tags: []models.Tag{}, + }, }, { - CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", - Name: "device-3", - Identity: &models.DeviceIdentity{MAC: "mac-3"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: true, - Namespace: "namespace-1", - Status: "accepted", - RemoteAddr: "", - Position: nil, - Tags: []string{"tag-1"}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, + CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + StatusUpdatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + LastSeen: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", + Name: "device-3", + Identity: &models.DeviceIdentity{MAC: "mac-3"}, + Info: nil, + PublicKey: "", + TenantID: "00000000-0000-4000-0000-000000000000", + Online: false, + Namespace: "namespace-1", + Status: "accepted", + RemoteAddr: "", + Position: nil, + Acceptable: false, + Taggable: models.Taggable{ + TagIDs: []string{"6791d3ae04ba86e6d7a0514d", "6791d3be5a201d874c4c2885"}, + Tags: []models.Tag{ + { + ID: "6791d3ae04ba86e6d7a0514d", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "production", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + { + ID: "6791d3be5a201d874c4c2885", + CreatedAt: time.Date(2023, 1, 1, 12, 
0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "development", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + }, + }, }, { - CreatedAt: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), - UID: "4300430e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809e", - Name: "device-2", - Identity: &models.DeviceIdentity{MAC: "mac-2"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: false, - Namespace: "namespace-1", - Status: "accepted", - RemoteAddr: "", - Position: nil, - Tags: []string{}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, + CreatedAt: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), + StatusUpdatedAt: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), + LastSeen: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), + UID: "4300430e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809e", + Name: "device-2", + Identity: &models.DeviceIdentity{MAC: "mac-2"}, + Info: nil, + PublicKey: "", + TenantID: "00000000-0000-4000-0000-000000000000", + Online: false, + Namespace: "namespace-1", + Status: "accepted", + RemoteAddr: "", + Position: nil, + Acceptable: false, + Taggable: models.Taggable{ + TagIDs: []string{}, + Tags: []models.Tag{}, + }, }, { - CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - UID: "5300530e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809f", - Name: "device-1", - Identity: &models.DeviceIdentity{MAC: "mac-1"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: false, - Namespace: "namespace-1", - Status: "accepted", - RemoteAddr: "", - Position: nil, - Tags: []string{"tag-1"}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, + 
CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + StatusUpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + LastSeen: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UID: "5300530e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809f", + Name: "device-1", + Identity: &models.DeviceIdentity{MAC: "mac-1"}, + Info: nil, + PublicKey: "", + TenantID: "00000000-0000-4000-0000-000000000000", + Online: false, + Namespace: "namespace-1", + Status: "accepted", + RemoteAddr: "", + Position: nil, + Acceptable: false, + Taggable: models.Taggable{ + TagIDs: []string{"6791d3ae04ba86e6d7a0514d"}, + Tags: []models.Tag{ + { + ID: "6791d3ae04ba86e6d7a0514d", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "production", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + }, + }, }, }, len: 4, @@ -482,33 +480,35 @@ func TestDeviceList(t *testing.T) { }, { description: "succeeds when devices are found filtering status", - filters: nil, - paginator: paginator.Query{Page: -1, PerPage: -1}, - status: models.DeviceStatusPending, - sort: "last_seen", - order: "asc", - fixtures: []string{fixtures.FixtureNamespaces, fixtures.FixtureDevices, fixtures.FixtureConnectedDevices}, + opts: []store.QueryOption{ + s.Options().WithDeviceStatus(models.DeviceStatusPending), + s.Options().Match(&query.Filters{}), + s.Options().Sort(&query.Sorter{By: "last_seen", Order: query.OrderAsc}), + s.Options().Paginate(&query.Paginator{Page: -1, PerPage: -1}), + }, + fixtures: []string{fixtureNamespaces, fixtureTags, fixtureDevices}, expected: Expected{ dev: []models.Device{ { - CreatedAt: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), - UID: "3300330e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809d", - Name: "device-4", - Identity: &models.DeviceIdentity{MAC: "mac-4"}, - Info: nil, - 
PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: false, - Namespace: "namespace-1", - Status: "pending", - RemoteAddr: "", - Position: nil, - Tags: []string{}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: true, + CreatedAt: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), + StatusUpdatedAt: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), + LastSeen: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), + UID: "3300330e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809d", + Name: "device-4", + Identity: &models.DeviceIdentity{MAC: "mac-4"}, + Info: nil, + PublicKey: "", + TenantID: "00000000-0000-4000-0000-000000000000", + Online: false, + Namespace: "namespace-1", + Status: "pending", + RemoteAddr: "", + Position: nil, + Acceptable: true, + Taggable: models.Taggable{ + TagIDs: []string{}, + Tags: []models.Tag{}, + }, }, }, len: 1, @@ -517,901 +517,494 @@ func TestDeviceList(t *testing.T) { }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + ctx := context.Background() - dev, count, err := mongostore.DeviceList( - context.TODO(), - tc.paginator, - tc.filters, - tc.status, - tc.sort, - tc.order, - store.DeviceListModeDefault, - ) - assert.Equal(t, tc.expected, Expected{dev: dev, len: count, err: err}) - }) - } -} + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) -func TestDeviceListByUsage(t *testing.T) { - type Expected struct { - uid []models.UID - len int - err error - } - cases := []struct { - description string - tenant string - fixtures []string - expected Expected - }{ - { - description: "returns an empty list when tenant not exist", - tenant: "nonexistent", - fixtures: 
[]string{fixtures.FixtureSessions}, - expected: Expected{ - uid: []models.UID{}, - len: 0, - err: nil, - }, - }, - { - description: "succeeds when has 1 or more device sessions", - tenant: "00000000-0000-4000-0000-000000000000", - fixtures: []string{fixtures.FixtureSessions}, - expected: Expected{ - uid: []models.UID{"2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c"}, - len: 1, - err: nil, - }, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - uids, err := mongostore.DeviceListByUsage(context.TODO(), tc.tenant) - assert.Equal(t, tc.expected, Expected{uid: uids, len: len(uids), err: err}) + dev, count, err := s.DeviceList(ctx, store.DeviceAcceptableIfNotAccepted, tc.opts...) + assert.Equal(t, tc.expected, Expected{dev: dev, len: count, err: err}) }) } } -func TestDeviceGet(t *testing.T) { +func TestDeviceResolve(t *testing.T) { type Expected struct { dev *models.Device err error } - cases := []struct { - description string - uid models.UID - fixtures []string - expected Expected - }{ - { - description: "fails when namespace is not found", - uid: models.UID("2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c"), - fixtures: []string{fixtures.FixtureDevices, fixtures.FixtureConnectedDevices}, - expected: Expected{ - dev: nil, - err: store.ErrNoDocuments, - }, - }, - { - description: "fails when device is not found", - uid: models.UID("nonexistent"), - fixtures: []string{fixtures.FixtureNamespaces, fixtures.FixtureDevices, fixtures.FixtureConnectedDevices}, - expected: Expected{ - dev: nil, - err: store.ErrNoDocuments, - }, - }, - { - description: "fails when device is not found due to tenant", - uid: 
models.UID("5600560h6ed5h960969e7f358g4568491247198ge8537e9g448609fff1b231f"), - fixtures: []string{fixtures.FixtureNamespaces, fixtures.FixtureDevices, fixtures.FixtureConnectedDevices}, - expected: Expected{ - dev: nil, - err: store.ErrNoDocuments, - }, - }, - { - description: "succeeds when device is found", - uid: models.UID("2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c"), - fixtures: []string{fixtures.FixtureNamespaces, fixtures.FixtureDevices, fixtures.FixtureConnectedDevices}, - expected: Expected{ - dev: &models.Device{ - CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", - Name: "device-3", - Identity: &models.DeviceIdentity{MAC: "mac-3"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: true, - Namespace: "namespace-1", - Status: "accepted", - RemoteAddr: "", - Position: nil, - Tags: []string{"tag-1"}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, - }, - err: nil, - }, - }, - } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - dev, err := mongostore.DeviceGet(context.TODO(), tc.uid) - assert.Equal(t, tc.expected, Expected{dev: dev, err: err}) - }) - } -} - -func TestDeviceGetByMac(t *testing.T) { - type Expected struct { - dev *models.Device - err error - } cases := []struct { description string - mac string - tenant string - status models.DeviceStatus + resolver store.DeviceResolver + value string fixtures []string expected Expected }{ { - description: "fails when device is not found due to 
mac", - mac: "nonexistent", - tenant: "00000000-0000-4000-0000-000000000000", - status: models.DeviceStatus(""), - fixtures: []string{fixtures.FixtureDevices}, + description: "fails when device not found by UID", + resolver: store.DeviceUIDResolver, + value: "nonexistent", + fixtures: []string{fixtureDevices}, expected: Expected{ dev: nil, err: store.ErrNoDocuments, }, }, { - description: "fails when device is not found due to tenant", - mac: "mac-3", - tenant: "nonexistent", - status: models.DeviceStatus(""), - fixtures: []string{fixtures.FixtureDevices}, - expected: Expected{ - dev: nil, - err: store.ErrNoDocuments, - }, - }, - { - description: "succeeds when device is found", - mac: "mac-3", - tenant: "00000000-0000-4000-0000-000000000000", - status: models.DeviceStatus(""), - fixtures: []string{fixtures.FixtureDevices}, + description: "succeeds resolving device by UID", + resolver: store.DeviceUIDResolver, + value: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", + fixtures: []string{fixtureNamespaces, fixtureTags, fixtureDevices}, expected: Expected{ dev: &models.Device{ - CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", - Name: "device-3", - Identity: &models.DeviceIdentity{MAC: "mac-3"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: false, - Status: "accepted", - RemoteAddr: "", - Position: nil, - Tags: []string{"tag-1"}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, + CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + StatusUpdatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + LastSeen: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", + Name: "device-3", + Identity: 
&models.DeviceIdentity{MAC: "mac-3"}, + Info: nil, + PublicKey: "", + TenantID: "00000000-0000-4000-0000-000000000000", + Online: false, + Status: "accepted", + RemoteAddr: "", + Position: nil, + Namespace: "namespace-1", + Acceptable: false, + Taggable: models.Taggable{ + TagIDs: []string{"6791d3ae04ba86e6d7a0514d", "6791d3be5a201d874c4c2885"}, + Tags: []models.Tag{ + { + ID: "6791d3ae04ba86e6d7a0514d", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "production", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + { + ID: "6791d3be5a201d874c4c2885", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "development", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + }, + }, }, err: nil, }, }, { - description: "succeeds when device with status is found", - mac: "mac-3", - tenant: "00000000-0000-4000-0000-000000000000", - status: models.DeviceStatus("accepted"), - fixtures: []string{fixtures.FixtureDevices}, + description: "succeeds resolving device by hostname", + resolver: store.DeviceHostnameResolver, + value: "device-3", + fixtures: []string{fixtureNamespaces, fixtureTags, fixtureDevices}, expected: Expected{ dev: &models.Device{ - CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", - Name: "device-3", - Identity: &models.DeviceIdentity{MAC: "mac-3"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: false, - Status: "accepted", - RemoteAddr: "", - Position: nil, - Tags: []string{"tag-1"}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, + CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + StatusUpdatedAt: time.Date(2023, 1, 3, 12, 0, 
0, 0, time.UTC), + LastSeen: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", + Name: "device-3", + Identity: &models.DeviceIdentity{MAC: "mac-3"}, + Info: nil, + PublicKey: "", + TenantID: "00000000-0000-4000-0000-000000000000", + Online: false, + Status: "accepted", + RemoteAddr: "", + Position: nil, + Namespace: "namespace-1", + Acceptable: false, + Taggable: models.Taggable{ + TagIDs: []string{"6791d3ae04ba86e6d7a0514d", "6791d3be5a201d874c4c2885"}, + Tags: []models.Tag{ + { + ID: "6791d3ae04ba86e6d7a0514d", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "production", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + { + ID: "6791d3be5a201d874c4c2885", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "development", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + }, + }, }, err: nil, }, }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - dev, err := mongostore.DeviceGetByMac(context.TODO(), tc.mac, tc.tenant, tc.status) - assert.Equal(t, tc.expected, Expected{dev: dev, err: err}) - }) - } -} - -func TestDeviceGetByName(t *testing.T) { - type Expected struct { - dev *models.Device - err error - } - cases := []struct { - description string - hostname string - tenant string - status models.DeviceStatus - fixtures []string - expected Expected - }{ - { - description: "fails when device is not found due to name", - hostname: "nonexistent", - tenant: "00000000-0000-4000-0000-000000000000", - status: models.DeviceStatusAccepted, - 
fixtures: []string{fixtures.FixtureDevices}, - expected: Expected{ - dev: nil, - err: store.ErrNoDocuments, - }, - }, - { - description: "fails when device is not found due to tenant", - hostname: "device-3", - tenant: "nonexistent", - status: models.DeviceStatusAccepted, - fixtures: []string{fixtures.FixtureDevices}, - expected: Expected{ - dev: nil, - err: store.ErrNoDocuments, - }, - }, { - description: "succeeds when device is found", - hostname: "device-3", - tenant: "00000000-0000-4000-0000-000000000000", - status: models.DeviceStatusAccepted, - fixtures: []string{fixtures.FixtureDevices}, + description: "succeeds resolving device by MAC", + resolver: store.DeviceMACResolver, + value: "mac-3", + fixtures: []string{fixtureNamespaces, fixtureTags, fixtureDevices}, expected: Expected{ dev: &models.Device{ - CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", - Name: "device-3", - Identity: &models.DeviceIdentity{MAC: "mac-3"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: false, - Status: "accepted", - RemoteAddr: "", - Position: nil, - Tags: []string{"tag-1"}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, + CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + StatusUpdatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + LastSeen: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", + Name: "device-3", + Identity: &models.DeviceIdentity{MAC: "mac-3"}, + Info: nil, + PublicKey: "", + TenantID: "00000000-0000-4000-0000-000000000000", + Online: false, + Status: "accepted", + RemoteAddr: "", + Position: nil, + Namespace: "namespace-1", + Acceptable: false, + Taggable: models.Taggable{ + TagIDs: 
[]string{"6791d3ae04ba86e6d7a0514d", "6791d3be5a201d874c4c2885"}, + Tags: []models.Tag{ + { + ID: "6791d3ae04ba86e6d7a0514d", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "production", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + { + ID: "6791d3be5a201d874c4c2885", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "development", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + }, + }, }, err: nil, }, }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { assert.NoError(t, srv.Reset()) }) - dev, err := mongostore.DeviceGetByName(context.TODO(), tc.hostname, tc.tenant, tc.status) + dev, err := s.DeviceResolve(context.Background(), tc.resolver, tc.value) assert.Equal(t, tc.expected, Expected{dev: dev, err: err}) }) } } -func TestDeviceGetByUID(t *testing.T) { +func TestDeviceCreate(t *testing.T) { type Expected struct { - dev *models.Device - err error + insertedUID string + err error } + cases := []struct { description string - uid models.UID - tenant string + device *models.Device fixtures []string expected Expected }{ { - description: "fails when device is not found due to UID", - uid: models.UID("nonexistent"), - tenant: "00000000-0000-4000-0000-000000000000", - fixtures: []string{fixtures.FixtureDevices}, - expected: Expected{ - dev: nil, - err: store.ErrNoDocuments, - }, - }, - { - description: "fails when device is not found due to tenant", - uid: models.UID("2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c"), - tenant: 
"nonexistent", - fixtures: []string{fixtures.FixtureDevices}, - expected: Expected{ - dev: nil, - err: store.ErrNoDocuments, + description: "succeeds when creating new device", + device: &models.Device{ + UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", + Identity: &models.DeviceIdentity{ + MAC: "mac-3", + }, + TenantID: "00000000-0000-4000-0000-000000000000", + LastSeen: clock.Now(), }, - }, - { - description: "succeeds when device is found", - uid: models.UID("2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c"), - tenant: "00000000-0000-4000-0000-000000000000", - fixtures: []string{fixtures.FixtureDevices}, + fixtures: []string{}, expected: Expected{ - dev: &models.Device{ - CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", - Name: "device-3", - Identity: &models.DeviceIdentity{MAC: "mac-3"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: false, - Status: "accepted", - RemoteAddr: "", - Position: nil, - Tags: []string{"tag-1"}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, - }, - err: nil, + insertedUID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", + err: nil, }, }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + t.Run(tc.description, func(tt *testing.T) { + assert.NoError(tt, srv.Apply(tc.fixtures...)) + tt.Cleanup(func() { assert.NoError(tt, srv.Reset()) }) - dev, err := mongostore.DeviceGetByUID(context.TODO(), tc.uid, tc.tenant) - assert.Equal(t, 
tc.expected, Expected{dev: dev, err: err}) + insertedUID, err := s.DeviceCreate(context.Background(), tc.device) + assert.Equal(tt, tc.expected, Expected{insertedUID, err}) }) } } -func TestDeviceLookup(t *testing.T) { +func TestDeviceConflicts(t *testing.T) { type Expected struct { - dev *models.Device - err error + conflicts []string + ok bool + err error } + cases := []struct { description string - namespace string - hostname string + target *models.DeviceConflicts fixtures []string expected Expected }{ { - description: "fails when namespace does not exist", - namespace: "nonexistent", - hostname: "device-3", - fixtures: []string{fixtures.FixtureNamespaces, fixtures.FixtureDevices}, - expected: Expected{ - dev: nil, - err: store.ErrNoDocuments, - }, - }, - { - description: "fails when device does not exist due to name", - namespace: "namespace-1", - hostname: "nonexistent", - fixtures: []string{fixtures.FixtureNamespaces, fixtures.FixtureDevices}, - expected: Expected{ - dev: nil, - err: store.ErrNoDocuments, - }, + description: "no conflicts when target is empty", + target: &models.DeviceConflicts{}, + fixtures: []string{fixtureDevices}, + expected: Expected{[]string{}, false, nil}, }, { - description: "fails when device does not exist due to tenant-id", - namespace: "namespace-1", - hostname: "invalid_tenant", - fixtures: []string{fixtures.FixtureNamespaces, fixtures.FixtureDevices}, - expected: Expected{ - dev: nil, - err: store.ErrNoDocuments, - }, + description: "no conflicts with non existing email", + target: &models.DeviceConflicts{Name: "nonexistent"}, + fixtures: []string{fixtureDevices}, + expected: Expected{[]string{}, false, nil}, }, { - description: "fails when device does not exist due to status other than accepted", - namespace: "namespace-1", - hostname: "pending", - fixtures: []string{fixtures.FixtureNamespaces, fixtures.FixtureDevices}, - expected: Expected{ - dev: nil, - err: store.ErrNoDocuments, - }, + description: "conflict detected with 
existing email", + target: &models.DeviceConflicts{Name: "device-1"}, + fixtures: []string{fixtureDevices}, + expected: Expected{[]string{"name"}, true, nil}, }, { - description: "succeeds when namespace exists and hostname status is accepted", - namespace: "namespace-1", - hostname: "device-3", - fixtures: []string{fixtures.FixtureNamespaces, fixtures.FixtureDevices}, - expected: Expected{ - dev: &models.Device{ - CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", - Name: "device-3", - Identity: &models.DeviceIdentity{MAC: "mac-3"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: false, - Status: "accepted", - RemoteAddr: "", - Position: nil, - Tags: []string{"tag-1"}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, - }, - err: nil, - }, + description: "no conflict with removed device name", + target: &models.DeviceConflicts{Name: "device-removed"}, + fixtures: []string{fixtureDevicesWithRemoved}, + expected: Expected{[]string{}, false, nil}, }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + ctx := context.Background() - dev, err := mongostore.DeviceLookup(context.TODO(), tc.namespace, tc.hostname) - assert.Equal(t, tc.expected, Expected{dev: dev, err: err}) + require.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { require.NoError(t, srv.Reset()) }) + + conflicts, ok, err := s.DeviceConflicts(ctx, tc.target) + require.Equal(t, tc.expected, Expected{conflicts, ok, err}) }) } } -func TestDeviceCreate(t *testing.T) { 
+func TestDeviceUpdate(t *testing.T) { cases := []struct { description string - hostname string - device models.Device + device *models.Device fixtures []string expected error }{ { - description: "succeeds when all data is valid", - hostname: "device-3", - device: models.Device{ - UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", - Identity: &models.DeviceIdentity{ - MAC: "mac-3", - }, + description: "fails when the device is not found due to uid", + device: &models.Device{ + UID: "nonexistent", TenantID: "00000000-0000-4000-0000-000000000000", - LastSeen: clock.Now(), }, - fixtures: []string{}, - expected: nil, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := mongostore.DeviceCreate(context.TODO(), tc.device, tc.hostname) - assert.Equal(t, tc.expected, err) - }) - } -} - -func TestDeviceRename(t *testing.T) { - cases := []struct { - description string - uid models.UID - hostname string - fixtures []string - expected error - }{ - { - description: "fails when the device is not found", - uid: models.UID("nonexistent"), - hostname: "new_hostname", - fixtures: []string{fixtures.FixtureDevices}, - expected: store.ErrNoDocuments, - }, - { - description: "succeeds when the device is found", - uid: models.UID("2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c"), - hostname: "new_hostname", - fixtures: []string{fixtures.FixtureDevices}, - expected: nil, + fixtures: []string{fixtureDevices}, + expected: store.ErrNoDocuments, }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - 
t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := mongostore.DeviceRename(context.TODO(), tc.uid, tc.hostname) - assert.Equal(t, tc.expected, err) - }) - } -} - -func TestDeviceUpdateStatus(t *testing.T) { - cases := []struct { - description string - uid models.UID - status string - fixtures []string - expected error - }{ { - description: "fails when the device is not found", - uid: models.UID("nonexistent"), - status: "accepted", - fixtures: []string{fixtures.FixtureDevices}, - expected: store.ErrNoDocuments, + description: "fails when the device is not found due to tenantID", + device: &models.Device{ + UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", + TenantID: "nonexistent", + }, + fixtures: []string{fixtureDevices}, + expected: store.ErrNoDocuments, }, { - description: "succeeds when the device is found", - uid: models.UID("2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c"), - status: "accepted", - fixtures: []string{fixtures.FixtureDevices}, - expected: nil, + description: "succeeds when the device", + device: &models.Device{ + UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + fixtures: []string{fixtureDevices}, + expected: nil, }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := mongostore.DeviceUpdateStatus(context.TODO(), tc.uid, models.DeviceStatus(tc.status)) - assert.Equal(t, tc.expected, err) - }) - } -} - -func TestDeviceUpdateOnline(t *testing.T) { - cases := []struct { - description string - uid models.UID - online bool - fixtures []string 
- expected error - }{ - { - description: "fails when the device is not found", - uid: models.UID("nonexistent"), - online: true, - fixtures: []string{fixtures.FixtureDevices}, - expected: store.ErrNoDocuments, - }, - { - description: "succeeds when the device is found", - uid: models.UID("2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c"), - online: true, - fixtures: []string{fixtures.FixtureDevices}, - expected: nil, - }, - } + ctx := context.Background() - db := dbtest.DBServer{} - defer db.Stop() + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := mongostore.DeviceUpdateOnline(context.TODO(), tc.uid, tc.online) + err := s.DeviceUpdate(ctx, tc.device) assert.Equal(t, tc.expected, err) }) } } -func TestDeviceUpdateLastSeen(t *testing.T) { - cases := []struct { - description string - uid models.UID - now time.Time - fixtures []string - expected error - }{ - { - description: "fails when the device is not found", - uid: models.UID("nonexistent"), - now: time.Now(), - fixtures: []string{fixtures.FixtureDevices}, - expected: store.ErrNoDocuments, - }, - { - description: "succeeds when the device is found", - uid: models.UID("2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c"), - now: time.Now(), - fixtures: []string{fixtures.FixtureDevices}, - expected: nil, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck 
- - err := mongostore.DeviceUpdateLastSeen(context.TODO(), tc.uid, tc.now) - assert.Equal(t, tc.expected, err) - }) +func TestDeviceHeartbeat(t *testing.T) { + type Expected struct { + modifiedCount int64 + err error } -} -func TestDeviceSetOnline(t *testing.T) { cases := []struct { description string - uid models.UID - online bool + uids []string + lastSeen time.Time fixtures []string - expected error + expected Expected }{ { - description: "succeeds when UID is valid and online is true", - uid: models.UID("2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c"), - online: true, - fixtures: []string{fixtures.FixtureDevices}, - expected: nil, + description: "succeeds when no devices match", + uids: []string{"nonexistent1", "nonexistent2"}, + lastSeen: time.Now(), + fixtures: []string{fixtureDevices}, + expected: Expected{ + modifiedCount: 0, + err: nil, + }, }, { - description: "succeeds when UID is valid and online is false", - uid: models.UID("2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c"), - online: false, - fixtures: []string{fixtures.FixtureDevices}, - expected: nil, + description: "succeeds when devices match", + uids: []string{ + "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", + "4300430e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809e", + }, + lastSeen: time.Now(), + fixtures: []string{fixtureDevices}, + expected: Expected{ + modifiedCount: 2, + err: nil, + }, }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := mongostore.DeviceSetOnline(context.TODO(), tc.uid, time.Now(), tc.online) - assert.Equal(t, tc.expected, err) + ctx := context.Background() + + assert.NoError(t, srv.Apply(tc.fixtures...)) + 
t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + modifiedCount, err := s.DeviceHeartbeat(ctx, tc.uids, tc.lastSeen) + require.Equal(t, Expected{modifiedCount: modifiedCount, err: err}, tc.expected) + + if tc.expected.modifiedCount > 0 { + cursor, err := db.Collection("devices").Find(ctx, bson.M{"uid": bson.M{"$in": tc.uids}}) + require.NoError(t, err) + + for cursor.Next(ctx) { + device := new(models.Device) + require.NoError(t, cursor.Decode(device)) + require.WithinDuration(t, tc.lastSeen, device.LastSeen, 2*time.Second) + require.Nil(t, device.DisconnectedAt) + } + } }) } } -func TestDeviceSetPosition(t *testing.T) { +func TestDeviceDelete(t *testing.T) { cases := []struct { description string - uid models.UID - position models.DevicePosition + device *models.Device fixtures []string expected error }{ { - description: "fails when the device is not found", - uid: models.UID("nonexistent"), - position: models.DevicePosition{ - Longitude: 1, - Latitude: 1, + description: "fails when device is not found", + device: &models.Device{ + UID: "nonexistent", }, - fixtures: []string{fixtures.FixtureDevices}, + fixtures: []string{fixtureDevices}, expected: store.ErrNoDocuments, }, { - description: "succeeds when the device is found", - uid: models.UID("2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c"), - position: models.DevicePosition{ - Longitude: 1, - Latitude: 1, + description: "succeeds when device is found", + device: &models.Device{ + UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", }, - fixtures: []string{fixtures.FixtureDevices}, + fixtures: []string{fixtureDevices}, expected: nil, }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + 
ctx := context.Background() - err := mongostore.DeviceSetPosition(context.TODO(), tc.uid, tc.position) + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + err := s.DeviceDelete(ctx, tc.device) assert.Equal(t, tc.expected, err) }) } } -func TestDeviceChooser(t *testing.T) { - cases := []struct { - description string - tenant string - chosen []string - fixtures []string - expected error - }{ - { - description: "", - tenant: "00000000-0000-4000-0000-000000000000", - chosen: []string{""}, - fixtures: []string{fixtures.FixtureDevices}, - expected: nil, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := mongostore.DeviceChooser(context.TODO(), tc.tenant, tc.chosen) - assert.Equal(t, tc.expected, err) - }) +func TestDeviceDeleteMany(t *testing.T) { + type Expected struct { + deletedCount int64 + err error } -} -func TestDeviceDelete(t *testing.T) { cases := []struct { description string - uid models.UID + uids []string fixtures []string - expected error + expected Expected }{ { - description: "fails when device is not found", - uid: models.UID("nonexistent"), - fixtures: []string{fixtures.FixtureDevices}, - expected: store.ErrNoDocuments, + description: "succeeds when no devices match", + uids: []string{}, + fixtures: []string{fixtureDevices}, + expected: Expected{ + deletedCount: 0, + err: nil, + }, }, { - description: "succeeds when device is found", - uid: models.UID("2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c"), - fixtures: []string{fixtures.FixtureDevices}, - expected: nil, + description: "succeeds when devices match", + uids: []string{ + 
"2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", + "4300430e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809e", + }, + fixtures: []string{fixtureDevices}, + expected: Expected{ + deletedCount: 2, + err: nil, + }, }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := mongostore.DeviceDelete(context.TODO(), tc.uid) - assert.Equal(t, tc.expected, err) + ctx := context.Background() + + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + deletedCount, err := s.DeviceDeleteMany(ctx, tc.uids) + require.Equal(t, tc.expected, Expected{deletedCount, err}) + if tc.expected.deletedCount > 0 { + for _, uid := range tc.uids { + count, err := db.Collection("devices").CountDocuments(ctx, bson.M{"uid": uid}) + require.NoError(t, err) + require.Equal(t, int64(0), count) + } + } }) } } diff --git a/api/store/mongo/firewall.go b/api/store/mongo/firewall.go deleted file mode 100644 index def1beccc07..00000000000 --- a/api/store/mongo/firewall.go +++ /dev/null @@ -1,130 +0,0 @@ -package mongo - -import ( - "context" - - "github.com/shellhub-io/shellhub/api/pkg/gateway" - "github.com/shellhub-io/shellhub/api/store" - "github.com/shellhub-io/shellhub/api/store/mongo/queries" - "github.com/shellhub-io/shellhub/pkg/api/paginator" - "github.com/shellhub-io/shellhub/pkg/models" - "go.mongodb.org/mongo-driver/bson" - "go.mongodb.org/mongo-driver/bson/primitive" - "go.mongodb.org/mongo-driver/mongo/options" -) - -func (s *Store) FirewallRuleList(ctx context.Context, pagination paginator.Query) ([]models.FirewallRule, int, error) { - query := []bson.M{ - { - "$sort": bson.M{ - "priority": 1, - }, - }, - } - - // Only match for the 
respective tenant if requested - if tenant := gateway.TenantFromContext(ctx); tenant != nil { - query = append(query, bson.M{ - "$match": bson.M{ - "tenant_id": tenant.ID, - }, - }) - } - - queryCount := query - queryCount = append(queryCount, bson.M{"$count": "count"}) - count, err := AggregateCount(ctx, s.db.Collection("firewall_rules"), queryCount) - if err != nil { - return nil, 0, FromMongoError(err) - } - - query = append(query, queries.BuildPaginationQuery(pagination)...) - - rules := make([]models.FirewallRule, 0) - cursor, err := s.db.Collection("firewall_rules").Aggregate(ctx, query) - if err != nil { - return nil, 0, FromMongoError(err) - } - defer cursor.Close(ctx) - - for cursor.Next(ctx) { - rule := new(models.FirewallRule) - err = cursor.Decode(&rule) - if err != nil { - return rules, count, FromMongoError(err) - } - - rules = append(rules, *rule) - } - - return rules, count, FromMongoError(err) -} - -func (s *Store) FirewallRuleCreate(ctx context.Context, rule *models.FirewallRule) error { - if err := rule.Validate(); err != nil { - return FromMongoError(err) - } - - if _, err := s.db.Collection("firewall_rules").InsertOne(ctx, &rule); err != nil { - return FromMongoError(err) - } - - return nil -} - -func (s *Store) FirewallRuleGet(ctx context.Context, id string) (*models.FirewallRule, error) { - rule := new(models.FirewallRule) - objID, err := primitive.ObjectIDFromHex(id) - if err != nil { - return nil, FromMongoError(err) - } - - if err := s.db.Collection("firewall_rules").FindOne(ctx, bson.M{"_id": objID}).Decode(&rule); err != nil { - return nil, FromMongoError(err) - } - - return rule, nil -} - -func (s *Store) FirewallRuleUpdate(ctx context.Context, id string, rule models.FirewallRuleUpdate) (*models.FirewallRule, error) { - if err := rule.Validate(); err != nil { - return nil, FromMongoError(err) - } - - objID, err := primitive.ObjectIDFromHex(id) - if err != nil { - return nil, FromMongoError(err) - } - - updateOpts := 
options.FindOneAndUpdate().SetReturnDocument(options.After) - result := s.db.Collection("firewall_rules").FindOneAndUpdate(ctx, bson.M{"_id": objID}, bson.M{"$set": rule}, updateOpts) - - if result.Err() != nil { - return nil, FromMongoError(result.Err()) - } - - firewallRule := new(models.FirewallRule) - if err := result.Decode(&firewallRule); err != nil { - return nil, FromMongoError(err) - } - - return firewallRule, nil -} - -func (s *Store) FirewallRuleDelete(ctx context.Context, id string) error { - objID, err := primitive.ObjectIDFromHex(id) - if err != nil { - return FromMongoError(err) - } - - fRule, err := s.db.Collection("firewall_rules").DeleteOne(ctx, bson.M{"_id": objID}) - if err != nil { - return FromMongoError(err) - } - - if fRule.DeletedCount < 1 { - return store.ErrNoDocuments - } - - return nil -} diff --git a/api/store/mongo/firewall_tags.go b/api/store/mongo/firewall_tags.go deleted file mode 100644 index 619de5833b8..00000000000 --- a/api/store/mongo/firewall_tags.go +++ /dev/null @@ -1,86 +0,0 @@ -package mongo - -import ( - "context" - - "github.com/shellhub-io/shellhub/api/store" - "go.mongodb.org/mongo-driver/bson" - "go.mongodb.org/mongo-driver/bson/primitive" -) - -func (s *Store) FirewallRulePushTag(ctx context.Context, id, tag string) error { - objID, err := primitive.ObjectIDFromHex(id) - if err != nil { - return FromMongoError(err) - } - - result, err := s.db.Collection("firewall_rules").UpdateOne(ctx, bson.M{"_id": objID}, bson.M{"$addToSet": bson.M{"filter.tags": tag}}) - if err != nil { - return err - } - - if result.ModifiedCount < 1 { - return store.ErrNoDocuments - } - - return nil -} - -func (s *Store) FirewallRulePullTag(ctx context.Context, id, tag string) error { - objID, err := primitive.ObjectIDFromHex(id) - if err != nil { - return FromMongoError(err) - } - - result, err := s.db.Collection("firewall_rules").UpdateOne(ctx, bson.M{"_id": objID}, bson.M{"$pull": bson.M{"filter.tags": tag}}) - if err != nil { - return err - 
} - - if result.ModifiedCount < 1 { - return store.ErrNoDocuments - } - - return nil -} - -func (s *Store) FirewallRuleSetTags(ctx context.Context, id string, tags []string) error { - objID, err := primitive.ObjectIDFromHex(id) - if err != nil { - return FromMongoError(err) - } - - result, err := s.db.Collection("firewall_rules").UpdateOne(ctx, bson.M{"_id": objID}, bson.M{"$set": bson.M{"filter.tags": tags}}) - if err != nil { - return err - } - - if result.ModifiedCount < 1 { - return store.ErrNoDocuments - } - - return nil -} - -func (s *Store) FirewallRuleBulkRenameTag(ctx context.Context, tenant, currentTag, newTag string) (int64, error) { - res, err := s.db.Collection("firewall_rules").UpdateMany(ctx, bson.M{"tenant_id": tenant, "filter.tags": currentTag}, bson.M{"$set": bson.M{"filter.tags.$": newTag}}) - - return res.ModifiedCount, FromMongoError(err) -} - -func (s *Store) FirewallRuleBulkDeleteTag(ctx context.Context, tenant, tag string) (int64, error) { - res, err := s.db.Collection("firewall_rules").UpdateMany(ctx, bson.M{"tenant_id": tenant}, bson.M{"$pull": bson.M{"filter.tags": tag}}) - - return res.ModifiedCount, FromMongoError(err) -} - -func (s *Store) FirewallRuleGetTags(ctx context.Context, tenant string) ([]string, int, error) { - list, err := s.db.Collection("firewall_rules").Distinct(ctx, "filter.tags", bson.M{"tenant_id": tenant}) - - tags := make([]string, len(list)) - for i, item := range list { - tags[i] = item.(string) //nolint:forcetypeassert - } - - return tags, len(tags), FromMongoError(err) -} diff --git a/api/store/mongo/firewall_tags_test.go b/api/store/mongo/firewall_tags_test.go deleted file mode 100644 index a442925cc20..00000000000 --- a/api/store/mongo/firewall_tags_test.go +++ /dev/null @@ -1,329 +0,0 @@ -package mongo - -import ( - "context" - "testing" - - "github.com/shellhub-io/shellhub/api/pkg/dbtest" - "github.com/shellhub-io/shellhub/api/pkg/fixtures" - "github.com/shellhub-io/shellhub/api/store" - 
"github.com/shellhub-io/shellhub/pkg/cache" - "github.com/stretchr/testify/assert" -) - -func TestFirewallRulePushTag(t *testing.T) { - cases := []struct { - description string - id string - tag string - fixtures []string - expected error - }{ - { - description: "fails when firewall rule is not found", - id: "6504b7bd9b6c4a63a9ccc053", - tag: "tag-1", - fixtures: []string{fixtures.FixtureFirewallRules}, - expected: store.ErrNoDocuments, - }, - { - description: "fails to add a tag that already exists", - id: "6504b7bd9b6c4a63a9ccc053", - tag: "tag-1", - fixtures: []string{fixtures.FixtureFirewallRules}, - expected: store.ErrNoDocuments, - }, - { - description: "succeeds to add a new tag when firewall rule is found and tag is not set yet", - id: "6504b7bd9b6c4a63a9ccc053", - tag: "tag4", - fixtures: []string{fixtures.FixtureFirewallRules}, - expected: nil, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := mongostore.FirewallRulePushTag(context.TODO(), tc.id, tc.tag) - assert.Equal(t, tc.expected, err) - }) - } -} - -func TestFirewallRulePullTag(t *testing.T) { - cases := []struct { - description string - id string - tag string - fixtures []string - expected error - }{ - { - description: "fails when firewall rule is not found", - id: "6504b7bd9b6c4a63a9ccc054", - tag: "tag-1", - fixtures: []string{fixtures.FixtureFirewallRules}, - expected: store.ErrNoDocuments, - }, - { - description: "fails when firewall rule but tag is not", - id: "6504b7bd9b6c4a63a9ccc053", - tag: "nonexistent", - fixtures: []string{fixtures.FixtureFirewallRules}, - expected: store.ErrNoDocuments, - }, - { - description: "succeeds when firewall rule and tag is found", - id: 
"6504b7bd9b6c4a63a9ccc053", - tag: "tag-1", - fixtures: []string{fixtures.FixtureFirewallRules}, - expected: nil, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := mongostore.FirewallRulePullTag(context.TODO(), tc.id, tc.tag) - assert.Equal(t, tc.expected, err) - }) - } -} - -func TestFirewallRuleSetTags(t *testing.T) { - cases := []struct { - description string - id string - tags []string - fixtures []string - expected error - }{ - { - description: "fails when firewall rule is not found", - id: "6504b7bd9b6c4a63a9ccc054", - tags: []string{"tag-1", "tag2"}, - fixtures: []string{fixtures.FixtureFirewallRules}, - expected: store.ErrNoDocuments, - }, - { - description: "succeeds when firewall rule and tag is found", - id: "6504b7bd9b6c4a63a9ccc053", - tags: []string{"tag-1", "tag2"}, - fixtures: []string{fixtures.FixtureFirewallRules}, - expected: nil, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := mongostore.FirewallRuleSetTags(context.TODO(), tc.id, tc.tags) - assert.Equal(t, tc.expected, err) - }) - } -} - -func TestFirewallRuleBulkRenameTags(t *testing.T) { - type Expected struct { - count int64 - err error - } - - cases := []struct { - description string - tenant string - oldTag string - newTag string - fixtures []string - expected Expected - }{ - { - description: "fails when tenant is not found", - tenant: "nonexistent", - oldTag: "tag-1", - newTag: "edited-tag", 
- fixtures: []string{fixtures.FixtureFirewallRules}, - expected: Expected{ - count: 0, - err: nil, - }, - }, - { - description: "fails when tag is not found", - tenant: "00000000-0000-4000-0000-000000000000", - oldTag: "nonexistent", - newTag: "edited-tag", - fixtures: []string{fixtures.FixtureFirewallRules}, - expected: Expected{ - count: 0, - err: nil, - }, - }, - { - description: "succeeds when tenant and tag is found", - tenant: "00000000-0000-4000-0000-000000000000", - oldTag: "tag-1", - newTag: "edited-tag", - fixtures: []string{fixtures.FixtureFirewallRules}, - expected: Expected{ - count: 3, - err: nil, - }, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - count, err := mongostore.FirewallRuleBulkRenameTag(context.TODO(), tc.tenant, tc.oldTag, tc.newTag) - assert.Equal(t, tc.expected, Expected{count, err}) - }) - } -} - -func TestFirewallRuleBulkDeleteTags(t *testing.T) { - type Expected struct { - count int64 - err error - } - - cases := []struct { - description string - tenant string - tag string - fixtures []string - expected Expected - }{ - { - description: "fails when tenant is not found", - tenant: "nonexistent", - tag: "tag-1", - fixtures: []string{fixtures.FixtureFirewallRules}, - expected: Expected{ - count: 0, - err: nil, - }, - }, - { - description: "fails when tag is not found", - tenant: "00000000-0000-4000-0000-000000000000", - tag: "nonexistent", - fixtures: []string{fixtures.FixtureFirewallRules}, - expected: Expected{ - count: 0, - err: nil, - }, - }, - { - description: "succeeds when tenant and tag is found", - tenant: "00000000-0000-4000-0000-000000000000", - tag: "tag-1", - fixtures: []string{fixtures.FixtureFirewallRules}, - expected: Expected{ 
- count: 3, - err: nil, - }, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - count, err := mongostore.FirewallRuleBulkDeleteTag(context.TODO(), tc.tenant, tc.tag) - assert.Equal(t, tc.expected, Expected{count, err}) - }) - } -} - -func TestFirewallRuleGetTags(t *testing.T) { - type Expected struct { - tags []string - len int - err error - } - - cases := []struct { - description string - tenant string - fixtures []string - expected Expected - }{ - { - description: "succeeds when no one tag are found", - tenant: "00000000-0000-4000-0000-000000000000", - fixtures: []string{}, - expected: Expected{ - tags: []string{}, - len: 0, - err: nil, - }, - }, - { - description: "succeeds when one or more tags are found", - tenant: "00000000-0000-4000-0000-000000000000", - fixtures: []string{fixtures.FixtureFirewallRules}, - expected: Expected{ - tags: []string{"tag-1"}, - len: 1, - err: nil, - }, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - tags, count, err := mongostore.FirewallRuleGetTags(context.TODO(), tc.tenant) - assert.Equal(t, tc.expected, Expected{tags: tags, len: count, err: err}) - }) - } -} diff --git a/api/store/mongo/firewall_test.go b/api/store/mongo/firewall_test.go deleted file mode 100644 index 168a042dc96..00000000000 --- a/api/store/mongo/firewall_test.go +++ /dev/null @@ -1,369 +0,0 @@ -package mongo - -import ( - "context" - "sort" - "testing" - - 
"github.com/shellhub-io/shellhub/api/pkg/dbtest" - "github.com/shellhub-io/shellhub/api/pkg/fixtures" - "github.com/shellhub-io/shellhub/api/store" - "github.com/shellhub-io/shellhub/pkg/api/paginator" - "github.com/shellhub-io/shellhub/pkg/cache" - "github.com/shellhub-io/shellhub/pkg/models" - "github.com/stretchr/testify/assert" -) - -func TestFirewallRuleList(t *testing.T) { - type Expected struct { - rules []models.FirewallRule - len int - err error - } - - cases := []struct { - description string - page paginator.Query - fixtures []string - expected Expected - }{ - { - description: "succeeds when no firewall rules are found", - page: paginator.Query{Page: -1, PerPage: -1}, - fixtures: []string{}, - expected: Expected{ - rules: []models.FirewallRule{}, - len: 0, - err: nil, - }, - }, - { - description: "succeeds when a firewall rule is found", - page: paginator.Query{Page: -1, PerPage: -1}, - fixtures: []string{fixtures.FixtureFirewallRules}, - expected: Expected{ - rules: []models.FirewallRule{ - { - ID: "6504b7bd9b6c4a63a9ccc053", - TenantID: "00000000-0000-4000-0000-000000000000", - FirewallRuleFields: models.FirewallRuleFields{ - Priority: 1, - Action: "allow", - Active: true, - SourceIP: ".*", - Username: ".*", - Filter: models.FirewallFilter{ - Hostname: "", - Tags: []string{"tag-1"}, - }, - }, - }, - { - ID: "e92f4a5d3e1a4f7b8b2b6e9a", - TenantID: "00000000-0000-4000-0000-000000000000", - FirewallRuleFields: models.FirewallRuleFields{ - Priority: 2, - Action: "allow", - Active: true, - SourceIP: "192.168.1.10", - Username: "john.doe", - Filter: models.FirewallFilter{ - Hostname: "", - Tags: []string{"tag-1"}, - }, - }, - }, - { - ID: "78c96f0a2e5b4dca8d78f00c", - TenantID: "00000000-0000-4000-0000-000000000000", - FirewallRuleFields: models.FirewallRuleFields{ - Priority: 3, - Action: "allow", - Active: true, - SourceIP: "10.0.0.0/24", - Username: "admin", - Filter: models.FirewallFilter{ - Hostname: "", - Tags: []string{}, - }, - }, - }, - { - ID: 
"3fd759a1ecb64ec5a07c8c0f", - TenantID: "00000000-0000-4000-0000-000000000000", - FirewallRuleFields: models.FirewallRuleFields{ - Priority: 4, - Action: "deny", - Active: true, - SourceIP: "172.16.0.0/16", - Username: ".*", - Filter: models.FirewallFilter{ - Hostname: "", - Tags: []string{"tag-1"}, - }, - }, - }, - }, - len: 4, - err: nil, - }, - }, - { - description: "succeeds when firewall rule list is not empty and paginator is different than -1", - page: paginator.Query{Page: 2, PerPage: 2}, - fixtures: []string{fixtures.FixtureFirewallRules}, - expected: Expected{ - rules: []models.FirewallRule{ - { - ID: "78c96f0a2e5b4dca8d78f00c", - TenantID: "00000000-0000-4000-0000-000000000000", - FirewallRuleFields: models.FirewallRuleFields{ - Priority: 3, - Action: "allow", - Active: true, - SourceIP: "10.0.0.0/24", - Username: "admin", - Filter: models.FirewallFilter{ - Hostname: "", - Tags: []string{}, - }, - }, - }, - { - ID: "3fd759a1ecb64ec5a07c8c0f", - TenantID: "00000000-0000-4000-0000-000000000000", - FirewallRuleFields: models.FirewallRuleFields{ - Priority: 4, - Action: "deny", - Active: true, - SourceIP: "172.16.0.0/16", - Username: ".*", - Filter: models.FirewallFilter{ - Hostname: "", - Tags: []string{"tag-1"}, - }, - }, - }, - }, - len: 4, - err: nil, - }, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - // Due to the non-deterministic order of applying fixtures when dealing with multiple datasets, - // we ensure that both the expected and result arrays are correctly sorted. 
- sort := func(fr []models.FirewallRule) { - sort.Slice(fr, func(i, j int) bool { - return fr[i].ID < fr[j].ID - }) - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - rules, count, err := mongostore.FirewallRuleList(context.TODO(), tc.page) - sort(tc.expected.rules) - sort(rules) - assert.Equal(t, tc.expected, Expected{rules: rules, len: count, err: err}) - }) - } -} - -func TestFirewallRuleGet(t *testing.T) { - type Expected struct { - rule *models.FirewallRule - err error - } - cases := []struct { - description string - id string - fixtures []string - expected Expected - }{ - { - description: "fails when firewall rule is not found", - id: "6504b7bd9b6c4a63a9ccc021", - fixtures: []string{fixtures.FixtureFirewallRules}, - expected: Expected{ - rule: nil, - err: store.ErrNoDocuments, - }, - }, - { - description: "succeeds when firewall rule is found", - id: "6504b7bd9b6c4a63a9ccc053", - fixtures: []string{fixtures.FixtureFirewallRules}, - expected: Expected{ - rule: &models.FirewallRule{ - ID: "6504b7bd9b6c4a63a9ccc053", - TenantID: "00000000-0000-4000-0000-000000000000", - FirewallRuleFields: models.FirewallRuleFields{ - Priority: 1, - Action: "allow", - Active: true, - SourceIP: ".*", - Username: ".*", - Filter: models.FirewallFilter{ - Hostname: "", - Tags: []string{"tag-1"}, - }, - }, - }, - err: nil, - }, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - rule, err := mongostore.FirewallRuleGet(context.TODO(), tc.id) - assert.Equal(t, tc.expected, Expected{rule: rule, err: err}) - }) - } -} - -func TestFirewallRuleUpdate(t *testing.T) { - 
type Expected struct { - rule *models.FirewallRule - err error - } - - cases := []struct { - description string - id string - rule models.FirewallRuleUpdate - fixtures []string - expected Expected - }{ - { - description: "fails when firewall rule is not found", - id: "6504b7bd9b6c4a63a9ccc000", - rule: models.FirewallRuleUpdate{ - FirewallRuleFields: models.FirewallRuleFields{ - Priority: 1, - Action: "deny", - Active: true, - SourceIP: ".*", - Username: ".*", - Filter: models.FirewallFilter{ - Hostname: "", - Tags: []string{"editedtag"}, - }, - }, - }, - fixtures: []string{fixtures.FixtureFirewallRules}, - expected: Expected{ - rule: nil, - err: store.ErrNoDocuments, - }, - }, - { - description: "succeeds when firewall rule is found", - id: "6504b7bd9b6c4a63a9ccc053", - rule: models.FirewallRuleUpdate{ - FirewallRuleFields: models.FirewallRuleFields{ - Priority: 1, - Action: "deny", - Active: true, - SourceIP: ".*", - Username: ".*", - Filter: models.FirewallFilter{ - Hostname: "", - Tags: []string{"editedtag"}, - }, - }, - }, - fixtures: []string{fixtures.FixtureFirewallRules}, - expected: Expected{ - rule: &models.FirewallRule{ - ID: "6504b7bd9b6c4a63a9ccc053", - TenantID: "00000000-0000-4000-0000-000000000000", - FirewallRuleFields: models.FirewallRuleFields{ - Priority: 1, - Action: "deny", - Active: true, - SourceIP: ".*", - Username: ".*", - Filter: models.FirewallFilter{ - Hostname: "", - Tags: []string{"editedtag"}, - }, - }, - }, - err: nil, - }, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - rule, err := mongostore.FirewallRuleUpdate(context.TODO(), tc.id, tc.rule) - assert.Equal(t, tc.expected, Expected{rule: rule, err: err}) - }) - } -} - -func 
TestFirewallRuleDelete(t *testing.T) { - cases := []struct { - description string - id string - fixtures []string - expected error - }{ - { - description: "fails when rule is not found", - id: "6504ac006bf3dbca079f76b1", - fixtures: []string{}, - expected: store.ErrNoDocuments, - }, - { - description: "succeeds when rule is found", - id: "6504b7bd9b6c4a63a9ccc053", - fixtures: []string{fixtures.FixtureFirewallRules}, - expected: nil, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := mongostore.FirewallRuleDelete(context.TODO(), tc.id) - assert.Equal(t, tc.expected, err) - }) - } -} diff --git a/api/store/mongo/fixtures/active_sessions.json b/api/store/mongo/fixtures/active_sessions.json new file mode 100644 index 00000000000..33111811f9a --- /dev/null +++ b/api/store/mongo/fixtures/active_sessions.json @@ -0,0 +1,9 @@ +{ + "active_sessions": { + "650a1c1b3b3bb3a0f8e9bf43": { + "last_seen": "2023-01-01T12:00:00.000Z", + "uid": "a3b0431f5df6a7827945d2e34872a5c781452bc36de42f8b1297fd9ecb012f68", + "tenant_id": "00000000-0000-4000-0000-000000000000" + } + } +} diff --git a/api/store/mongo/fixtures/api-key.json b/api/store/mongo/fixtures/api-key.json new file mode 100644 index 00000000000..313904cd156 --- /dev/null +++ b/api/store/mongo/fixtures/api-key.json @@ -0,0 +1,22 @@ +{ + "api_keys": { + "f23a2e56cd3fcfba002c72675c870e1e7813292adc40bbf14cea479a2e07976a": { + "name": "dev", + "created_by": "507f1f77bcf86cd799439011", + "tenant_id": "00000000-0000-4000-0000-000000000000", + "role": "admin", + "created_at": "2023-01-01T12:00:00.000Z", + "updated_at": "2023-01-01T12:00:00.000Z", + "expires_in": 0 + }, + 
"a1b2c73ea41f70870c035283336d72228118213ed03ec78043ffee48d827af11": { + "name": "prod", + "created_by": "507f1f77bcf86cd799439011", + "tenant_id": "00000000-0000-4000-0000-000000000000", + "role": "operator", + "created_at": "2023-01-02T12:00:00.000Z", + "updated_at": "2023-01-02T12:00:00.000Z", + "expires_in": 10 + } + } +} diff --git a/api/pkg/fixtures/data/devices.json b/api/store/mongo/fixtures/devices.json similarity index 83% rename from api/pkg/fixtures/data/devices.json rename to api/store/mongo/fixtures/devices.json index fe81e73d1a0..5c6fc4077df 100644 --- a/api/pkg/fixtures/data/devices.json +++ b/api/store/mongo/fixtures/devices.json @@ -2,7 +2,9 @@ "devices": { "656f605bafb652df9927adef": { "created_at": "2023-01-01T12:00:00.000Z", + "removed_at": null, "last_seen": "2023-01-01T12:00:00.000Z", + "disconnected_at": null, "status_updated_at": "2023-01-01T12:00:00.000Z", "identity": { "mac": "mac-1" @@ -13,15 +15,17 @@ "public_key": "", "remote_addr": "", "status": "accepted", - "tags": [ - "tag-1" + "tag_ids": [ + "6791d3ae04ba86e6d7a0514d" ], "tenant_id": "00000000-0000-4000-0000-000000000000", "uid": "5300530e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809f" }, "656f608671625f495fe70037": { "created_at": "2023-01-02T12:00:00.000Z", + "removed_at": null, "last_seen": "2023-01-02T12:00:00.000Z", + "disconnected_at": null, "status_updated_at": "2023-01-02T12:00:00.000Z", "identity": { "mac": "mac-2" @@ -32,13 +36,15 @@ "public_key": "", "remote_addr": "", "status": "accepted", - "tags": [], + "tag_ids": [], "tenant_id": "00000000-0000-4000-0000-000000000000", "uid": "4300430e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809e" }, "6500c8f026e1e911042ee820": { "created_at": "2023-01-03T12:00:00.000Z", + "removed_at": null, "last_seen": "2023-01-03T12:00:00.000Z", + "disconnected_at": null, "status_updated_at": "2023-01-03T12:00:00.000Z", "identity": { "mac": "mac-3" @@ -49,15 +55,18 @@ "public_key": "", "remote_addr": "", "status": "accepted", 
- "tags": [ - "tag-1" + "tag_ids": [ + "6791d3ae04ba86e6d7a0514d", + "6791d3be5a201d874c4c2885" ], "tenant_id": "00000000-0000-4000-0000-000000000000", "uid": "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c" }, "656f60cada27dad292686e34": { "created_at": "2023-01-04T12:00:00.000Z", + "removed_at": null, "last_seen": "2023-01-04T12:00:00.000Z", + "disconnected_at": null, "status_updated_at": "2023-01-04T12:00:00.000Z", "identity": { "mac": "mac-4" @@ -68,7 +77,7 @@ "public_key": "", "remote_addr": "", "status": "pending", - "tags": [], + "tag_ids": [], "tenant_id": "00000000-0000-4000-0000-000000000000", "uid": "3300330e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809d" } diff --git a/api/store/mongo/fixtures/devices_with_removed.json b/api/store/mongo/fixtures/devices_with_removed.json new file mode 100644 index 00000000000..9ea2203b5f3 --- /dev/null +++ b/api/store/mongo/fixtures/devices_with_removed.json @@ -0,0 +1,42 @@ +{ + "devices": { + "656f605bafb652df9927adef": { + "created_at": "2023-01-01T12:00:00.000Z", + "removed_at": null, + "last_seen": "2023-01-01T12:00:00.000Z", + "disconnected_at": null, + "status_updated_at": "2023-01-01T12:00:00.000Z", + "identity": { + "mac": "mac-1" + }, + "info": null, + "name": "device-1", + "position": null, + "public_key": "", + "remote_addr": "", + "status": "accepted", + "tag_ids": [], + "tenant_id": "00000000-0000-4000-0000-000000000000", + "uid": "5300530e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809f" + }, + "656f60dbda27dad292686e35": { + "created_at": "2023-01-05T12:00:00.000Z", + "removed_at": "2023-01-06T12:00:00.000Z", + "last_seen": "2023-01-05T12:00:00.000Z", + "disconnected_at": null, + "status_updated_at": "2023-01-06T12:00:00.000Z", + "identity": { + "mac": "mac-removed" + }, + "info": null, + "name": "device-removed", + "position": null, + "public_key": "", + "remote_addr": "", + "status": "removed", + "tag_ids": [], + "tenant_id": "00000000-0000-4000-0000-000000000000", + 
"uid": "6600660e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809a" + } + } +} diff --git a/api/pkg/fixtures/data/firewall_rules.json b/api/store/mongo/fixtures/firewall_rules.json similarity index 80% rename from api/pkg/fixtures/data/firewall_rules.json rename to api/store/mongo/fixtures/firewall_rules.json index fb1f687eaba..61b40aca650 100644 --- a/api/pkg/fixtures/data/firewall_rules.json +++ b/api/store/mongo/fixtures/firewall_rules.json @@ -4,7 +4,9 @@ "action": "allow", "active": true, "filter": { - "tags": ["tag-1"] + "tag_ids": [ + "6791d3ae04ba86e6d7a0514d" + ] }, "priority": 1, "source_ip": ".*", @@ -15,7 +17,9 @@ "action": "allow", "active": true, "filter": { - "tags": ["tag-1"] + "tag_ids": [ + "6791d3be5a201d874c4c2885" + ] }, "priority": 2, "source_ip": "192.168.1.10", @@ -26,7 +30,7 @@ "action": "allow", "active": true, "filter": { - "tags": [] + "tag_ids": [] }, "priority": 3, "source_ip": "10.0.0.0/24", @@ -37,7 +41,9 @@ "action": "deny", "active": true, "filter": { - "tags": ["tag-1"] + "tag_ids": [ + "6791d3ae04ba86e6d7a0514d" + ] }, "priority": 4, "source_ip": "172.16.0.0/16", diff --git a/api/store/mongo/fixtures/membership_invitations.json b/api/store/mongo/fixtures/membership_invitations.json new file mode 100644 index 00000000000..1414953a83c --- /dev/null +++ b/api/store/mongo/fixtures/membership_invitations.json @@ -0,0 +1,40 @@ +{ + "membership_invitations": { + "507f1f77bcf86cd799439012": { + "tenant_id": "00000000-0000-4000-0000-000000000000", + "user_id": "6509e169ae6144b2f56bf288", + "invited_by": "507f1f77bcf86cd799439011", + "role": "observer", + "status": "pending", + "created_at": "2023-01-01T12:00:00.000Z", + "updated_at": "2023-01-02T12:00:00.000Z", + "status_updated_at": "2023-01-01T12:00:00.000Z", + "expires_at": "2023-01-08T12:00:00.000Z", + "invitations": 1 + }, + "507f1f77bcf86cd799439013": { + "tenant_id": "00000000-0000-4001-0000-000000000000", + "user_id": "608f32a2c7351f001f6475e0", + "invited_by": 
"6509e169ae6144b2f56bf288", + "role": "administrator", + "status": "accepted", + "created_at": "2023-01-05T12:00:00.000Z", + "updated_at": "2023-01-06T12:00:00.000Z", + "status_updated_at": "2023-01-06T12:00:00.000Z", + "expires_at": "2023-01-12T12:00:00.000Z", + "invitations": 2 + }, + "507f1f77bcf86cd799439014": { + "tenant_id": "00000000-0000-4000-0000-000000000000", + "user_id": "507f1f77bcf86cd799439011", + "invited_by": "6509e169ae6144b2f56bf288", + "role": "observer", + "status": "pending", + "created_at": "2023-01-07T12:00:00.000Z", + "updated_at": "2023-01-07T12:00:00.000Z", + "status_updated_at": "2023-01-07T12:00:00.000Z", + "expires_at": "2023-01-14T12:00:00.000Z", + "invitations": 1 + } + } +} diff --git a/api/store/mongo/fixtures/namespaces.json b/api/store/mongo/fixtures/namespaces.json new file mode 100644 index 00000000000..81ac29e8156 --- /dev/null +++ b/api/store/mongo/fixtures/namespaces.json @@ -0,0 +1,98 @@ +{ + "namespaces": { + "6500c8f086353a36732d544a": { + "created_at": "2023-01-01T12:00:00.000Z", + "max_devices": -1, + "members": [ + { + "id": "507f1f77bcf86cd799439011", + "added_at": "2023-01-01T12:00:00.000Z", + "role": "owner" + }, + { + "id": "6509e169ae6144b2f56bf288", + "added_at": "2023-01-01T12:00:00.000Z", + "role": "observer" + } + ], + "name": "namespace-1", + "owner": "507f1f77bcf86cd799439011", + "settings": { + "session_record": true + }, + "tenant_id": "00000000-0000-4000-0000-000000000000", + "devices_accepted_count": 15, + "devices_pending_count": 3, + "devices_rejected_count": 2, + "devices_removed_count": 1 + }, + "e5b45d9a2c0a4f8dbb7f4e5d": { + "created_at": "2023-01-01T12:00:00.000Z", + "max_devices": 10, + "members": [ + { + "id": "6509e169ae6144b2f56bf288", + "added_at": "2023-01-01T12:00:00.000Z", + "role": "owner" + }, + { + "id": "907f1f77bcf86cd799439022", + "added_at": "2023-01-01T12:00:00.000Z", + "role": "operator" + } + ], + "name": "namespace-2", + "owner": "6509e169ae6144b2f56bf288", + "settings": { + 
"session_record": false + }, + "tenant_id": "00000000-0000-4001-0000-000000000000", + "devices_accepted_count": 8, + "devices_pending_count": 1, + "devices_rejected_count": 0, + "devices_removed_count": 2 + }, + "3c7f09a5b46c4a63a9ccc071": { + "created_at": "2023-01-01T12:00:00.000Z", + "max_devices": 3, + "members": [ + { + "id": "657b0e3bff780d625f74e49a", + "added_at": "2023-01-01T12:00:00.000Z", + "role": "owner" + } + ], + "name": "namespace-3", + "owner": "657b0e3bff780d625f74e49a", + "settings": { + "session_record": true + }, + "tenant_id": "00000000-0000-4002-0000-000000000000", + "devices_accepted_count": 342, + "devices_pending_count": 0, + "devices_rejected_count": 2, + "devices_removed_count": 4 + }, + "6577271b9f5a02f3bc8f5400": { + "created_at": "2023-01-01T12:00:00.000Z", + "max_devices": -1, + "members": [ + { + "id": "6577267d8752d05270a4c07d", + "added_at": "2023-01-01T12:00:00.000Z", + "role": "owner" + } + ], + "name": "namespace-4", + "owner": "6577267d8752d05270a4c07d", + "settings": { + "session_record": true + }, + "tenant_id": "00000000-0000-4003-0000-000000000000", + "devices_accepted_count": 25, + "devices_pending_count": 5, + "devices_rejected_count": 3, + "devices_removed_count": 0 + } + } +} diff --git a/api/pkg/fixtures/data/private_keys.json b/api/store/mongo/fixtures/private_keys.json similarity index 100% rename from api/pkg/fixtures/data/private_keys.json rename to api/store/mongo/fixtures/private_keys.json diff --git a/api/store/mongo/fixtures/public_keys.json b/api/store/mongo/fixtures/public_keys.json new file mode 100644 index 00000000000..bba57be4fe6 --- /dev/null +++ b/api/store/mongo/fixtures/public_keys.json @@ -0,0 +1,18 @@ +{ + "public_keys": { + "65088c97a3efce71bf6e1f32": { + "created_at": "2023-01-01T12:00:00.000Z", + "data": "test", + "filter": { + "hostname": ".*", + "tag_ids": [ + "6791d3ae04ba86e6d7a0514d", + "6791d3be5a201d874c4c2885" + ] + }, + "fingerprint": "fingerprint", + "name": "public_key", + 
"tenant_id": "00000000-0000-4000-0000-000000000000" + } + } +} diff --git a/api/pkg/fixtures/data/recorded_sessions.json b/api/store/mongo/fixtures/recorded_sessions.json similarity index 100% rename from api/pkg/fixtures/data/recorded_sessions.json rename to api/store/mongo/fixtures/recorded_sessions.json diff --git a/api/pkg/fixtures/data/recovery_tokens.json b/api/store/mongo/fixtures/recovery_tokens.json similarity index 100% rename from api/pkg/fixtures/data/recovery_tokens.json rename to api/store/mongo/fixtures/recovery_tokens.json diff --git a/api/pkg/fixtures/data/sessions.json b/api/store/mongo/fixtures/sessions.json similarity index 100% rename from api/pkg/fixtures/data/sessions.json rename to api/store/mongo/fixtures/sessions.json diff --git a/api/store/mongo/fixtures/tags.json b/api/store/mongo/fixtures/tags.json new file mode 100644 index 00000000000..5796c95ac2a --- /dev/null +++ b/api/store/mongo/fixtures/tags.json @@ -0,0 +1,22 @@ +{ + "tags": { + "6791d3ae04ba86e6d7a0514d": { + "created_at": "2023-01-01T12:00:00.000Z", + "updated_at": "2023-01-01T12:00:00.000Z", + "name": "production", + "tenant_id": "00000000-0000-4000-0000-000000000000" + }, + "6791d3be5a201d874c4c2885": { + "created_at": "2023-01-01T12:00:00.000Z", + "updated_at": "2023-01-01T12:00:00.000Z", + "name": "development", + "tenant_id": "00000000-0000-4000-0000-000000000000" + }, + "6791d3c2a62aafaefe821ab3": { + "created_at": "2023-01-01T12:00:00.000Z", + "updated_at": "2023-01-01T12:00:00.000Z", + "name": "owners", + "tenant_id": "00000000-0000-4001-0000-000000000000" + } + } +} diff --git a/api/store/mongo/fixtures/user_invitations.json b/api/store/mongo/fixtures/user_invitations.json new file mode 100644 index 00000000000..f489da92e7b --- /dev/null +++ b/api/store/mongo/fixtures/user_invitations.json @@ -0,0 +1,11 @@ +{ + "user_invitations": { + "507f1f77bcf86cd799439011": { + "email": "jane.doe@test.com", + "created_at": "2023-01-01T12:00:00.000Z", + "updated_at": 
"2023-01-02T12:00:00.000Z", + "invitations": 2, + "status": "pending" + } + } +} diff --git a/api/store/mongo/fixtures/users.json b/api/store/mongo/fixtures/users.json new file mode 100644 index 00000000000..675ff542734 --- /dev/null +++ b/api/store/mongo/fixtures/users.json @@ -0,0 +1,59 @@ +{ + "users": { + "507f1f77bcf86cd799439011": { + "status": "confirmed", + "created_at": "2023-01-01T12:00:00.000Z", + "last_login": "2023-01-01T12:00:00.000Z", + "email": "john.doe@test.com", + "email_marketing": true, + "max_namespaces": 0, + "name": "john doe", + "password": "fcf730b6d95236ecd3c9fc2d92d7b6b2bb061514961aec041d6c7a7192f592e4", + "username": "john_doe" + }, + "608f32a2c7351f001f6475e0": { + "status": "confirmed", + "created_at": "2023-01-02T12:00:00.000Z", + "last_login": "2023-01-02T12:00:00.000Z", + "email": "jane.smith@test.com", + "email_marketing": true, + "max_namespaces": 3, + "name": "Jane Smith", + "password": "a0b8c29f4c8d57e542f5e81d35ebe801fd27f569f116fe670e8962d798512a1d", + "username": "jane_smith" + }, + "709f45b5e812c1002f3a67e7": { + "status": "confirmed", + "created_at": "2023-01-03T12:00:00.000Z", + "last_login": "2023-01-03T12:00:00.000Z", + "email": "bob.johnson@test.com", + "email_marketing": true, + "max_namespaces": 10, + "name": "Bob Johnson", + "password": "5f3b3956a1a150b73e6b27e674f27d7aeb01ab1a40c179c3e1aa6026a36655a2", + "username": "bob_johnson" + }, + "80fdcea1d7299c002f3a67e8": { + "status": "not-confirmed", + "created_at": "2023-01-04T12:00:00.000Z", + "last_login": null, + "email": "alex.rodriguez@test.com", + "email_marketing": false, + "max_namespaces": 3, + "name": "Alex Rodriguez", + "password": "c5093eb98678c7a3324825b84c6b67c1127b93786482ddbbd356e67e29b2763f", + "username": "alex_rodriguez" + }, + "6509e169ae6144b2f56bf288": { + "status": "confirmed", + "created_at": "2023-01-05T12:00:00.000Z", + "last_login": "2023-01-05T12:00:00.000Z", + "email": "maria.garcia@test.com", + "email_marketing": true, + "max_namespaces": 
5, + "name": "Maria Garcia", + "password": "c2301b2b7e872843b473d2c301e4fb2e6e9f27f2e7a1b6ad44a3b2c97f1670b3", + "username": "maria_garcia" + } + } +} diff --git a/api/store/mongo/internal/filters.go b/api/store/mongo/internal/filters.go new file mode 100644 index 00000000000..9ada613a315 --- /dev/null +++ b/api/store/mongo/internal/filters.go @@ -0,0 +1,146 @@ +package internal + +import ( + "errors" + "fmt" + "strconv" + "time" + + "github.com/shellhub-io/shellhub/pkg/api/query" + "go.mongodb.org/mongo-driver/bson" +) + +// ParseFilterOperator constructs the filter operator, returning its Bson representation and a boolean +// indicating whether the operator is valid or not. +func ParseFilterOperator(fo *query.FilterOperator) (string, bool) { + validProperties := []string{"and", "or"} + for _, op := range validProperties { + if op == fo.Name { + return fmt.Sprintf("$%s", fo.Name), true + } + } + + return "", false +} + +// ParseFilterProperty constructs the property, returning the BSON representation of the property, a boolean +// indicating whether the operator is valid or not, and an error if any. +func ParseFilterProperty(fp *query.FilterProperty) (bson.M, bool, error) { + var res bson.M + var err error + var ok bool + + switch fp.Operator { + case "contains": + res, err = fromContains(fp.Value) + ok = true + case "eq": + res, err = fromEq(fp.Value) + ok = true + case "bool": + res, err = fromBool(fp.Value) + ok = true + case "gt": + res, err = fromGt(fp.Value) + ok = true + case "lt": + res, err = fromLt(fp.Value) + ok = true + case "ne": + res, err = fromNe(fp.Value) + ok = true + default: + return nil, false, nil + } + + return res, ok, err +} + +// fromContains converts a "contains" JSON expression to a Bson expression using "$regex" or "$all". 
+func fromContains(value interface{}) (bson.M, error) { + switch value.(type) { + case string: + return bson.M{"$regex": value, "$options": "i"}, nil + case []interface{}: + return bson.M{"$all": value}, nil + } + + return nil, errors.New("invalid value type for fromContains") +} + +// fromEq converts an "eq" JSON expression to a Bson expression using "$eq". +func fromEq(value interface{}) (bson.M, error) { + return bson.M{"$eq": value}, nil +} + +// fromBool converts a "bool" JSON expression to a Bson expression using "$eq" for comparing boolean values. +func fromBool(value interface{}) (bson.M, error) { + switch v := value.(type) { + case int: + value = v != 0 + case string: + var err error + value, err = strconv.ParseBool(v) + if err != nil { + return nil, err + } + } + + return bson.M{"$eq": value}, nil +} + +// fromGt converts a "gt" JSON expression to a Bson expression using "$gt". +func fromGt(value interface{}) (bson.M, error) { + switch v := value.(type) { + case int: + return bson.M{"$gt": v}, nil + case int64: + return bson.M{"$gt": v}, nil + case float64: + return bson.M{"$gt": v}, nil + case string: + if intVal, err := strconv.Atoi(v); err == nil { + return bson.M{"$gt": intVal}, nil + } + + if timeVal, err := time.Parse(time.RFC3339, v); err == nil { + return bson.M{"$gt": timeVal}, nil + } + + return bson.M{"$gt": v}, nil + case time.Time: + return bson.M{"$gt": v}, nil + default: + return bson.M{"$gt": value}, nil + } +} + +// fromLt converts a "lt" JSON expression to a Bson expression using "$lt". 
+func fromLt(value interface{}) (bson.M, error) { + switch v := value.(type) { + case int: + return bson.M{"$lt": v}, nil + case int64: + return bson.M{"$lt": v}, nil + case float64: + return bson.M{"$lt": v}, nil + case string: + if intVal, err := strconv.Atoi(v); err == nil { + return bson.M{"$lt": intVal}, nil + } + + if timeVal, err := time.Parse(time.RFC3339, v); err == nil { + return bson.M{"$lt": timeVal}, nil + } + + return bson.M{"$lt": v}, nil + case time.Time: + return bson.M{"$lt": v}, nil + default: + return bson.M{"$lt": value}, nil + } +} + +func fromNe(value interface{}) (bson.M, error) { + return bson.M{"$ne": value}, nil +} diff --git a/api/store/mongo/license.go b/api/store/mongo/license.go deleted file mode 100644 index 9c2ef333b1b..00000000000 --- a/api/store/mongo/license.go +++ /dev/null @@ -1,27 +0,0 @@ -package mongo - -import ( - "context" - - "github.com/shellhub-io/shellhub/pkg/models" - "go.mongodb.org/mongo-driver/bson" - "go.mongodb.org/mongo-driver/mongo/options" -) - -func (s *Store) LicenseLoad(ctx context.Context) (*models.License, error) { - findOpts := options.FindOne() - findOpts.SetSort(bson.M{"created_at": -1}) - - license := new(models.License) - if err := s.db.Collection("licenses").FindOne(ctx, bson.M{}, findOpts).Decode(&license); err != nil { - return nil, FromMongoError(err) - } - - return license, nil -} - -func (s *Store) LicenseSave(ctx context.Context, license *models.License) error { - _, err := s.db.Collection("licenses").InsertOne(ctx, license) - - return FromMongoError(err) -} diff --git a/api/store/mongo/license_test.go b/api/store/mongo/license_test.go deleted file mode 100644 index bfef36b4573..00000000000 --- a/api/store/mongo/license_test.go +++ /dev/null @@ -1,98 +0,0 @@ -package mongo - -import ( - "context" - "testing" - "time" - - "github.com/shellhub-io/shellhub/api/pkg/dbtest" - "github.com/shellhub-io/shellhub/api/pkg/fixtures" - "github.com/shellhub-io/shellhub/api/store" - 
"github.com/shellhub-io/shellhub/pkg/cache" - "github.com/shellhub-io/shellhub/pkg/models" - "github.com/stretchr/testify/assert" -) - -func TestLicenseLoad(t *testing.T) { - type Expected struct { - license *models.License - err error - } - - cases := []struct { - description string - fixtures []string - expected Expected - }{ - { - description: "fails when license is not found", - fixtures: []string{}, - expected: Expected{ - license: nil, - err: store.ErrNoDocuments, - }, - }, - { - description: "succeeds when license is found", - fixtures: []string{fixtures.FixtureLicenses}, - expected: Expected{ - license: &models.License{ - CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - RawData: []byte("test"), - }, - err: nil, - }, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - license, err := mongostore.LicenseLoad(context.TODO()) - assert.Equal(t, tc.expected, Expected{license: license, err: err}) - }) - } -} - -func TestLicenseSave(t *testing.T) { - cases := []struct { - description string - license *models.License - fixtures []string - expected error - }{ - { - description: "succeeds when data is valid", - license: &models.License{ - RawData: []byte("test"), - CreatedAt: time.Now(), - }, - fixtures: []string{}, - expected: nil, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := mongostore.LicenseSave(context.TODO(), tc.license) - assert.Equal(t, tc.expected, err) - }) 
- } -} diff --git a/api/store/mongo/member.go b/api/store/mongo/member.go new file mode 100644 index 00000000000..efbc5ada6c8 --- /dev/null +++ b/api/store/mongo/member.go @@ -0,0 +1,116 @@ +package mongo + +import ( + "context" + "strings" + + "github.com/shellhub-io/shellhub/api/store" + "github.com/shellhub-io/shellhub/pkg/models" + log "github.com/sirupsen/logrus" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" + "go.mongodb.org/mongo-driver/mongo" +) + +func (s *Store) NamespaceCreateMembership(ctx context.Context, tenantID string, member *models.Member) error { + err := s.db. + Collection("namespaces"). + FindOne(ctx, bson.M{"tenant_id": tenantID, "members": bson.M{"$elemMatch": bson.M{"id": member.ID}}}). + Err() + if err == nil { + return ErrNamespaceDuplicatedMember + } + + memberBson := bson.M{ + "id": member.ID, + "added_at": member.AddedAt, + "role": member.Role, + } + + res, err := s.db. + Collection("namespaces"). + UpdateOne(ctx, bson.M{"tenant_id": tenantID}, bson.M{"$addToSet": bson.M{"members": memberBson}}) + if err != nil { + return FromMongoError(err) + } + + if res.MatchedCount < 1 { + return store.ErrNoDocuments + } + + if err := s.cache.Delete(ctx, strings.Join([]string{"namespace", tenantID}, "/")); err != nil { + log.Error(err) + } + + return nil +} + +func (s *Store) NamespaceUpdateMembership(ctx context.Context, tenantID string, member *models.Member) error { + filter := bson.M{"tenant_id": tenantID, "members": bson.M{"$elemMatch": bson.M{"id": member.ID}}} + + memberBson := bson.M{ + "members.$.id": member.ID, + "members.$.added_at": member.AddedAt, + "members.$.role": member.Role, + } + + ns, err := s.db.Collection("namespaces").UpdateOne(ctx, filter, bson.M{"$set": memberBson}) + if err != nil { + return FromMongoError(err) + } + + if ns.MatchedCount < 1 { + return ErrUserNotFound + } + + if err := s.cache.Delete(ctx, strings.Join([]string{"namespace", tenantID}, "/")); err != nil { + log.Error(err) 
+ } + + return nil +} + +func (s *Store) NamespaceDeleteMembership(ctx context.Context, tenantID string, member *models.Member) error { + session, err := s.db.Client().StartSession() + if err != nil { + return err + } + defer session.EndSession(ctx) + + fn := func(_ mongo.SessionContext) (any, error) { + res, err := s.db. + Collection("namespaces"). + UpdateOne(ctx, bson.M{"tenant_id": tenantID}, bson.M{"$pull": bson.M{"members": bson.M{"id": member.ID}}}) + if err != nil { + return nil, FromMongoError(err) + } + + switch { + case res.MatchedCount < 1: // tenant not found + return nil, store.ErrNoDocuments + case res.ModifiedCount < 1: // member not found + return nil, ErrUserNotFound + } + + objID, err := primitive.ObjectIDFromHex(member.ID) + if err != nil { + return nil, err + } + + _, err = s.db. + Collection("users"). + UpdateOne(ctx, bson.M{"_id": objID, "preferred_namespace": tenantID}, bson.M{"$set": bson.M{"preferred_namespace": ""}}) + + return nil, FromMongoError(err) + } + + if _, err := session.WithTransaction(ctx, fn); err != nil { + return err + } + + if err := s.cache.Delete(ctx, strings.Join([]string{"namespace", tenantID}, "/")); err != nil { + log.Error(err) + } + + return nil +} diff --git a/api/store/mongo/member_test.go b/api/store/mongo/member_test.go new file mode 100644 index 00000000000..997ef26080d --- /dev/null +++ b/api/store/mongo/member_test.go @@ -0,0 +1,201 @@ +package mongo_test + +import ( + "context" + "testing" + "time" + + "github.com/shellhub-io/shellhub/api/store" + "github.com/shellhub-io/shellhub/api/store/mongo" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "go.mongodb.org/mongo-driver/bson" +) + +func TestNamespaceCreateMembership(t *testing.T) { + type Expected struct { + err error + } + + cases := []struct { + description string + tenantID string + member *models.Member + fixtures 
[]string + expected Expected + }{ + { + description: "fails when tenant is not found", + tenantID: "nonexistent", + member: &models.Member{ + ID: "6509de884238881ac1b2b289", + Role: authorizer.RoleObserver, + }, + fixtures: []string{fixtureNamespaces}, + expected: Expected{err: store.ErrNoDocuments}, + }, + { + description: "fails when member has already been added", + tenantID: "00000000-0000-4000-0000-000000000000", + member: &models.Member{ + ID: "6509e169ae6144b2f56bf288", + Role: authorizer.RoleObserver, + }, + fixtures: []string{fixtureNamespaces}, + expected: Expected{err: mongo.ErrNamespaceDuplicatedMember}, + }, + { + description: "succeeds when tenant is found", + tenantID: "00000000-0000-4000-0000-000000000000", + member: &models.Member{ + ID: "6509de884238881ac1b2b289", + Role: authorizer.RoleObserver, + }, + fixtures: []string{fixtureNamespaces}, + expected: Expected{err: nil}, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + ctx := context.Background() + + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + if err := s.NamespaceCreateMembership(ctx, tc.tenantID, tc.member); tc.expected.err != nil { + require.Equal(t, tc.expected.err, err) + + return + } + + require.NoError(t, db.Collection("namespaces").FindOne(ctx, bson.M{"tenant_id": tc.tenantID, "members.id": tc.member.ID}).Err()) + }) + } +} + +func TestNamespaceUpdateMembership(t *testing.T) { + type Expected struct { + err error + } + + cases := []struct { + description string + tenantID string + member *models.Member + fixtures []string + expected Expected + }{ + { + description: "fails when user is not found", + tenantID: "00000000-0000-4000-0000-000000000000", + member: &models.Member{ + ID: "000000000000000000000000", + Role: authorizer.RoleObserver, + }, + fixtures: []string{fixtureNamespaces}, + expected: Expected{err: mongo.ErrUserNotFound}, + }, + { + description: "succeeds when tenant and user 
is found", + tenantID: "00000000-0000-4000-0000-000000000000", + member: &models.Member{ + ID: "6509e169ae6144b2f56bf288", + Role: authorizer.RoleAdministrator, + AddedAt: time.Now(), + }, + fixtures: []string{fixtureNamespaces}, + expected: Expected{err: nil}, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + ctx := context.Background() + + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + if err := s.NamespaceUpdateMembership(ctx, tc.tenantID, tc.member); tc.expected.err != nil { + require.Equal(t, tc.expected.err, err) + + return + } + + namespace := new(models.Namespace) + require.NoError(t, db.Collection("namespaces").FindOne(ctx, bson.M{"tenant_id": tc.tenantID, "members.id": tc.member.ID}).Decode(namespace)) + require.Equal(t, 2, len(namespace.Members)) + require.Equal(t, tc.member.ID, namespace.Members[1].ID) + require.Equal(t, tc.member.Role, namespace.Members[1].Role) + }) + } +} + +func TestNamespaceDeleteMembership(t *testing.T) { + type Expected struct { + err error + } + + cases := []struct { + description string + tenantID string + member *models.Member + fixtures []string + expected Expected + }{ + { + description: "fails when tenant is not found", + tenantID: "nonexistent", + member: &models.Member{ + ID: "6509de884238881ac1b2b289", + }, + fixtures: []string{fixtureNamespaces}, + expected: Expected{err: store.ErrNoDocuments}, + }, + { + description: "fails when member is not found", + tenantID: "00000000-0000-4000-0000-000000000000", + member: &models.Member{ + ID: "nonexistent", + }, + fixtures: []string{fixtureNamespaces}, + expected: Expected{err: mongo.ErrUserNotFound}, + }, + { + description: "succeeds when tenant and user is found", + tenantID: "00000000-0000-4000-0000-000000000000", + member: &models.Member{ + ID: "6509e169ae6144b2f56bf288", + }, + fixtures: []string{fixtureNamespaces}, + expected: Expected{err: nil}, + }, + } + + for _, tc := 
range cases { + t.Run(tc.description, func(t *testing.T) { + ctx := context.Background() + + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + if err := s.NamespaceDeleteMembership(ctx, tc.tenantID, tc.member); tc.expected.err != nil { + require.Equal(t, tc.expected.err, err) + + return + } + + namespace := new(models.Namespace) + require.NoError(t, db.Collection("namespaces").FindOne(ctx, bson.M{"tenant_id": tc.tenantID}).Decode(namespace)) + require.Equal(t, 1, len(namespace.Members)) + }) + } +} diff --git a/api/store/mongo/membership-invitation.go b/api/store/mongo/membership-invitation.go new file mode 100644 index 00000000000..2b0d6b11379 --- /dev/null +++ b/api/store/mongo/membership-invitation.go @@ -0,0 +1,146 @@ +package mongo + +import ( + "context" + + "github.com/shellhub-io/shellhub/api/store" + "github.com/shellhub-io/shellhub/pkg/clock" + "github.com/shellhub-io/shellhub/pkg/models" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +func (s *Store) MembershipInvitationCreate(ctx context.Context, invitation *models.MembershipInvitation) error { + now := clock.Now() + invitation.CreatedAt = now + invitation.UpdatedAt = now + invitation.StatusUpdatedAt = now + + bsonBytes, err := bson.Marshal(invitation) + if err != nil { + return FromMongoError(err) + } + + doc := make(bson.M) + if err := bson.Unmarshal(bsonBytes, &doc); err != nil { + return FromMongoError(err) + } + + objID := primitive.NewObjectID() + doc["_id"] = objID + doc["user_id"], _ = primitive.ObjectIDFromHex(invitation.UserID) + doc["invited_by"], _ = primitive.ObjectIDFromHex(invitation.InvitedBy) + + if _, err := s.db.Collection("membership_invitations").InsertOne(ctx, doc); err != nil { + return FromMongoError(err) + } + + invitation.ID = objID.Hex() + + return nil +} + +func (s *Store) MembershipInvitationResolve(ctx context.Context, tenantID, userID string) (*models.MembershipInvitation, 
error) { + userObjID, _ := primitive.ObjectIDFromHex(userID) + + pipeline := []bson.M{ + { + "$match": bson.M{"tenant_id": tenantID, "user_id": userObjID}, + }, + { + "$sort": bson.D{{Key: "_id", Value: -1}}, + }, + { + "$limit": 1, + }, + { + "$lookup": bson.M{ + "from": "namespaces", + "localField": "tenant_id", + "foreignField": "tenant_id", + "as": "namespace", + }, + }, + { + "$lookup": bson.M{ + "from": "users", + "localField": "user_id", + "foreignField": "_id", + "as": "user", + }, + }, + { + "$lookup": bson.M{ + "from": "user_invitations", + "localField": "user_id", + "foreignField": "_id", + "as": "user_invitation", + }, + }, + { + "$addFields": bson.M{ + "namespace_name": bson.M{"$arrayElemAt": bson.A{"$namespace.name", 0}}, + "user_email": bson.M{ + "$ifNull": bson.A{ + bson.M{"$arrayElemAt": bson.A{"$user.email", 0}}, + bson.M{"$arrayElemAt": bson.A{"$user_invitation.email", 0}}, + }, + }, + }, + }, + { + "$project": bson.M{ + "namespace": 0, + "user": 0, + "user_invitation": 0, + }, + }, + } + + cursor, err := s.db.Collection("membership_invitations").Aggregate(ctx, pipeline) + if err != nil { + return nil, FromMongoError(err) + } + defer cursor.Close(ctx) + + if !cursor.Next(ctx) { + return nil, store.ErrNoDocuments + } + + invitation := &models.MembershipInvitation{} + if err := cursor.Decode(invitation); err != nil { + return nil, FromMongoError(err) + } + + return invitation, nil +} + +func (s *Store) MembershipInvitationUpdate(ctx context.Context, invitation *models.MembershipInvitation) error { + invitation.UpdatedAt = clock.Now() + + bsonBytes, err := bson.Marshal(invitation) + if err != nil { + return FromMongoError(err) + } + + doc := make(bson.M) + if err := bson.Unmarshal(bsonBytes, &doc); err != nil { + return FromMongoError(err) + } + + delete(doc, "_id") + doc["user_id"], _ = primitive.ObjectIDFromHex(invitation.UserID) + doc["invited_by"], _ = primitive.ObjectIDFromHex(invitation.InvitedBy) + + objID, _ := 
primitive.ObjectIDFromHex(invitation.ID) + r, err := s.db.Collection("membership_invitations").UpdateOne(ctx, bson.M{"_id": objID}, bson.M{"$set": doc}) + if err != nil { + return FromMongoError(err) + } + + if r.MatchedCount == 0 { + return store.ErrNoDocuments + } + + return nil +} diff --git a/api/store/mongo/membership-invitation_test.go b/api/store/mongo/membership-invitation_test.go new file mode 100644 index 00000000000..fbcc5261884 --- /dev/null +++ b/api/store/mongo/membership-invitation_test.go @@ -0,0 +1,295 @@ +package mongo_test + +import ( + "context" + "testing" + "time" + + "github.com/shellhub-io/shellhub/api/store" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" + "github.com/shellhub-io/shellhub/pkg/clock" + clockmock "github.com/shellhub-io/shellhub/pkg/clock/mocks" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/stretchr/testify/require" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +func TestStore_MembershipInvitationCreate(t *testing.T) { + mockClock := new(clockmock.Clock) + clock.DefaultBackend = mockClock + + now := time.Now() + mockClock.On("Now").Return(now) + expiresAt := now.Add(7 * 24 * time.Hour) + + cases := []struct { + description string + invitation *models.MembershipInvitation + fixtures []string + expected map[string]any + }{ + { + description: "succeeds creating new invitation", + invitation: &models.MembershipInvitation{ + TenantID: "00000000-0000-4000-0000-000000000000", + UserID: "6509e169ae6144b2f56bf288", + InvitedBy: "507f1f77bcf86cd799439011", + Role: authorizer.RoleObserver, + Status: models.MembershipInvitationStatusPending, + ExpiresAt: &expiresAt, + Invitations: 1, + }, + fixtures: []string{}, + expected: map[string]any{ + "tenant_id": "00000000-0000-4000-0000-000000000000", + "role": "observer", + "status": "pending", + "created_at": primitive.NewDateTimeFromTime(now), + "updated_at": primitive.NewDateTimeFromTime(now), + "status_updated_at": 
primitive.NewDateTimeFromTime(now), + "invitations": int32(1), + }, + }, + { + description: "succeeds creating invitation with ID", + invitation: &models.MembershipInvitation{ + ID: "507f1f77bcf86cd799439020", + TenantID: "00000000-0000-4001-0000-000000000000", + UserID: "907f1f77bcf86cd799439022", + InvitedBy: "6509e169ae6144b2f56bf288", + Role: authorizer.RoleAdministrator, + Status: models.MembershipInvitationStatusAccepted, + ExpiresAt: &expiresAt, + Invitations: 2, + }, + fixtures: []string{}, + expected: map[string]any{ + "tenant_id": "00000000-0000-4001-0000-000000000000", + "role": "administrator", + "status": "accepted", + "created_at": primitive.NewDateTimeFromTime(now), + "updated_at": primitive.NewDateTimeFromTime(now), + "status_updated_at": primitive.NewDateTimeFromTime(now), + "invitations": int32(2), + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + ctx := context.Background() + + require.NoError(tt, srv.Apply(tc.fixtures...)) + tt.Cleanup(func() { + require.NoError(tt, srv.Reset()) + }) + + err := s.MembershipInvitationCreate(ctx, tc.invitation) + require.NoError(tt, err) + require.NotEmpty(tt, tc.invitation.ID) + + objID, _ := primitive.ObjectIDFromHex(tc.invitation.ID) + userObjID, _ := primitive.ObjectIDFromHex(tc.invitation.UserID) + invitedByObjID, _ := primitive.ObjectIDFromHex(tc.invitation.InvitedBy) + + tmpInvitation := make(map[string]any) + require.NoError(tt, db.Collection("membership_invitations").FindOne(ctx, bson.M{"_id": objID}).Decode(&tmpInvitation)) + + require.Equal(tt, objID, tmpInvitation["_id"]) + require.Equal(tt, userObjID, tmpInvitation["user_id"]) + require.Equal(tt, invitedByObjID, tmpInvitation["invited_by"]) + + for field, expectedValue := range tc.expected { + require.Equal(tt, expectedValue, tmpInvitation[field]) + } + }) + } +} + +func TestStore_MembershipInvitationResolve(t *testing.T) { + type Expected struct { + invitation *models.MembershipInvitation + err error + } + + 
cases := []struct { + description string + tenantID string + userID string + fixtures []string + expected Expected + }{ + { + description: "fails when invitation not found", + tenantID: "00000000-0000-4000-0000-000000000000", + userID: "000000000000000000000000", + fixtures: []string{fixtureMembershipInvitations, fixtureNamespaces, fixtureUsers}, + expected: Expected{invitation: nil, err: store.ErrNoDocuments}, + }, + { + description: "succeeds fetching email from users collection", + tenantID: "00000000-0000-4000-0000-000000000000", + userID: "6509e169ae6144b2f56bf288", + fixtures: []string{fixtureMembershipInvitations, fixtureNamespaces, fixtureUsers}, + expected: Expected{ + invitation: &models.MembershipInvitation{ + ID: "507f1f77bcf86cd799439012", + TenantID: "00000000-0000-4000-0000-000000000000", + NamespaceName: "namespace-1", + UserID: "6509e169ae6144b2f56bf288", + UserEmail: "maria.garcia@test.com", + InvitedBy: "507f1f77bcf86cd799439011", + Role: authorizer.RoleObserver, + Status: models.MembershipInvitationStatusPending, + }, + err: nil, + }, + }, + { + description: "succeeds fetching email from user_invitations collection", + tenantID: "00000000-0000-4000-0000-000000000000", + userID: "507f1f77bcf86cd799439011", + fixtures: []string{fixtureMembershipInvitations, fixtureNamespaces, fixtureUserInvitations}, + expected: Expected{ + invitation: &models.MembershipInvitation{ + ID: "507f1f77bcf86cd799439014", + TenantID: "00000000-0000-4000-0000-000000000000", + NamespaceName: "namespace-1", + UserID: "507f1f77bcf86cd799439011", + UserEmail: "jane.doe@test.com", + InvitedBy: "6509e169ae6144b2f56bf288", + Role: authorizer.RoleObserver, + Status: models.MembershipInvitationStatusPending, + }, + err: nil, + }, + }, + { + description: "returns most recent when multiple invitations exist", + tenantID: "00000000-0000-4001-0000-000000000000", + userID: "608f32a2c7351f001f6475e0", + fixtures: []string{fixtureMembershipInvitations, fixtureNamespaces, fixtureUsers}, + 
expected: Expected{ + invitation: &models.MembershipInvitation{ + ID: "507f1f77bcf86cd799439013", + TenantID: "00000000-0000-4001-0000-000000000000", + NamespaceName: "namespace-2", + UserID: "608f32a2c7351f001f6475e0", + UserEmail: "jane.smith@test.com", + InvitedBy: "6509e169ae6144b2f56bf288", + Role: authorizer.RoleAdministrator, + Status: models.MembershipInvitationStatusAccepted, + }, + err: nil, + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + ctx := context.Background() + + require.NoError(tt, srv.Apply(tc.fixtures...)) + tt.Cleanup(func() { + require.NoError(tt, srv.Reset()) + }) + + invitation, err := s.MembershipInvitationResolve(ctx, tc.tenantID, tc.userID) + + if tc.expected.err != nil { + require.Equal(tt, tc.expected.err, err) + require.Nil(tt, invitation) + } else { + require.NoError(tt, err) + require.NotNil(tt, invitation) + require.Equal(tt, tc.expected.invitation.ID, invitation.ID) + require.Equal(tt, tc.expected.invitation.TenantID, invitation.TenantID) + require.Equal(tt, tc.expected.invitation.NamespaceName, invitation.NamespaceName) + require.Equal(tt, tc.expected.invitation.UserID, invitation.UserID) + require.Equal(tt, tc.expected.invitation.UserEmail, invitation.UserEmail) + require.Equal(tt, tc.expected.invitation.InvitedBy, invitation.InvitedBy) + require.Equal(tt, tc.expected.invitation.Role, invitation.Role) + require.Equal(tt, tc.expected.invitation.Status, invitation.Status) + } + }) + } +} + +func TestStore_MembershipInvitationUpdate(t *testing.T) { + mockClock := new(clockmock.Clock) + clock.DefaultBackend = mockClock + + now := time.Now() + mockClock.On("Now").Return(now) + + type Expected struct { + err error + } + + cases := []struct { + description string + invitation *models.MembershipInvitation + fixtures []string + expected Expected + }{ + { + description: "fails when invitation not found", + invitation: &models.MembershipInvitation{ + ID: "000000000000000000000000", + TenantID: 
"00000000-0000-4000-0000-000000000000", + UserID: "6509e169ae6144b2f56bf288", + InvitedBy: "507f1f77bcf86cd799439011", + Role: authorizer.RoleObserver, + Status: models.MembershipInvitationStatusPending, + StatusUpdatedAt: now, + Invitations: 2, + }, + fixtures: []string{fixtureMembershipInvitations}, + expected: Expected{err: store.ErrNoDocuments}, + }, + { + description: "succeeds when invitation found", + invitation: &models.MembershipInvitation{ + ID: "507f1f77bcf86cd799439012", + TenantID: "00000000-0000-4000-0000-000000000000", + UserID: "6509e169ae6144b2f56bf288", + InvitedBy: "507f1f77bcf86cd799439011", + Role: authorizer.RoleAdministrator, + Status: models.MembershipInvitationStatusAccepted, + StatusUpdatedAt: now, + Invitations: 3, + }, + fixtures: []string{fixtureMembershipInvitations}, + expected: Expected{err: nil}, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + ctx := context.Background() + + require.NoError(tt, srv.Apply(tc.fixtures...)) + tt.Cleanup(func() { + require.NoError(tt, srv.Reset()) + }) + + err := s.MembershipInvitationUpdate(ctx, tc.invitation) + + if tc.expected.err != nil { + require.Equal(tt, tc.expected.err, err) + } else { + require.NoError(tt, err) + + objID, _ := primitive.ObjectIDFromHex(tc.invitation.ID) + updatedInvitation := &models.MembershipInvitation{} + require.NoError(tt, db.Collection("membership_invitations").FindOne(ctx, bson.M{"_id": objID}).Decode(updatedInvitation)) + + require.Equal(tt, tc.invitation.Role, updatedInvitation.Role) + require.Equal(tt, tc.invitation.Status, updatedInvitation.Status) + require.Equal(tt, tc.invitation.Invitations, updatedInvitation.Invitations) + require.Equal(tt, primitive.NewDateTimeFromTime(now), primitive.NewDateTimeFromTime(updatedInvitation.UpdatedAt)) + } + }) + } +} diff --git a/api/store/mongo/mfa_store.go b/api/store/mongo/mfa_store.go deleted file mode 100644 index 955c6f3fa87..00000000000 --- a/api/store/mongo/mfa_store.go +++ 
/dev/null @@ -1,108 +0,0 @@ -package mongo - -import ( - "context" - - "github.com/shellhub-io/shellhub/pkg/models" - "go.mongodb.org/mongo-driver/bson" - "go.mongodb.org/mongo-driver/bson/primitive" -) - -// GetStatusMFA seachr for statusMFA in the lits of users by id. -func (s *Store) GetStatusMFA(ctx context.Context, id string) (bool, error) { - var user models.User - objID, err := primitive.ObjectIDFromHex(id) - if err != nil { - return false, err - } - - if err := s.db.Collection("users").FindOne(ctx, bson.M{"_id": objID}).Decode(&user); err != nil { - return false, FromMongoError(err) - } - - return user.MFA, nil -} - -// Add a new StatusMFA for the user by email. -func (s *Store) AddStatusMFA(ctx context.Context, username string, statusMFA bool) error { - if _, err := s.db.Collection("users").UpdateOne(ctx, bson.M{"username": username}, bson.M{"$set": bson.M{"status_mfa": statusMFA}}); err != nil { - return FromMongoError(err) - } - - return nil -} - -func (s *Store) AddSecret(ctx context.Context, username string, secret string) error { - if _, err := s.db.Collection("users").UpdateOne(ctx, bson.M{"username": username}, bson.M{"$set": bson.M{"secret": secret}}); err != nil { - return FromMongoError(err) - } - - return nil -} - -func (s *Store) GetSecret(ctx context.Context, id string) (string, error) { - var user models.User - objID, err := primitive.ObjectIDFromHex(id) - if err != nil { - return "", err - } - - if err := s.db.Collection("users").FindOne(ctx, bson.M{"_id": objID}).Decode(&user); err != nil { - return "", FromMongoError(err) - } - - return user.Secret, nil -} - -func (s *Store) DeleteSecret(ctx context.Context, username string) error { - _, err := s.db.Collection("users").UpdateOne(ctx, bson.M{"username": username}, bson.M{"$unset": bson.M{"secret": ""}}) - if err != nil { - return FromMongoError(err) - } - - return nil -} - -func (s *Store) GetCodes(ctx context.Context, id string) ([]string, error) { - var codes models.User - objID, err := 
primitive.ObjectIDFromHex(id) - if err != nil { - return nil, err - } - - if err := s.db.Collection("users").FindOne(ctx, bson.M{"_id": objID}).Decode(&codes); err != nil { - return nil, FromMongoError(err) - } - - return codes.Codes, nil -} - -func (s *Store) AddCodes(ctx context.Context, username string, codes []string) error { - if _, err := s.db.Collection("users").UpdateOne(ctx, bson.M{"username": username}, bson.M{"$set": bson.M{"codes": codes}}); err != nil { - return FromMongoError(err) - } - - return nil -} - -func (s *Store) UpdateCodes(ctx context.Context, id string, codes []string) error { - objID, err := primitive.ObjectIDFromHex(id) - if err != nil { - return FromMongoError(err) - } - - if _, err := s.db.Collection("users").UpdateOne(ctx, bson.M{"_id": objID}, bson.M{"$set": bson.M{"codes": codes}}); err != nil { - return FromMongoError(err) - } - - return nil -} - -func (s *Store) DeleteCodes(ctx context.Context, username string) error { - _, err := s.db.Collection("users").UpdateOne(ctx, bson.M{"username": username}, bson.M{"$unset": bson.M{"codes": ""}}) - if err != nil { - return FromMongoError(err) - } - - return nil -} diff --git a/api/store/mongo/mfa_store_test.go b/api/store/mongo/mfa_store_test.go deleted file mode 100644 index 015d60b7da0..00000000000 --- a/api/store/mongo/mfa_store_test.go +++ /dev/null @@ -1,164 +0,0 @@ -package mongo - -import ( - "context" - "testing" - - "github.com/shellhub-io/mongotest" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" - "github.com/shellhub-io/shellhub/api/pkg/fixtures" - "github.com/shellhub-io/shellhub/pkg/cache" - "github.com/stretchr/testify/assert" -) - -func TestDeleteCodes(t *testing.T) { - ctx := context.TODO() - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - cases := []struct { - description string - username string - fixtures []string - expected error - }{ - { - description: 
"success when try to delete codes", - username: "username", - fixtures: []string{fixtures.FixtureUsers}, - expected: nil, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := mongostore.DeleteCodes(ctx, tc.username) - assert.Equal(t, tc.expected, err) - - err = mongotest.DropDatabase() - assert.NoError(t, err) - }) - } -} - -func TestAddStatusMFA(t *testing.T) { - ctx := context.TODO() - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - cases := []struct { - description string - username string - status bool - fixtures []string - expected error - }{ - { - description: "success when try to add status MFA", - username: "username", - status: true, - fixtures: []string{fixtures.FixtureUsers}, - expected: nil, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := mongostore.AddStatusMFA(ctx, tc.username, tc.status) - assert.Equal(t, tc.expected, err) - - err = mongotest.DropDatabase() - assert.NoError(t, err) - }) - } -} - -func TestAddSecret(t *testing.T) { - ctx := context.TODO() - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - cases := []struct { - description string - username string - secret string - fixtures []string - expected error - }{ - { - description: "success when try to add status MFA", - username: "username", - secret: "IOJDSFIAWMKXskdlmawOSDMCALWC", - fixtures: []string{fixtures.FixtureUsers}, - expected: nil, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer 
fixtures.Teardown() // nolint: errcheck - - err := mongostore.AddSecret(ctx, tc.username, tc.secret) - assert.Equal(t, tc.expected, err) - - err = mongotest.DropDatabase() - assert.NoError(t, err) - }) - } -} - -func TestDeleteSecret(t *testing.T) { - ctx := context.TODO() - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - cases := []struct { - description string - username string - fixtures []string - expected error - }{ - { - description: "success to delete a status MFA", - username: "username", - fixtures: []string{fixtures.FixtureUsers}, - expected: nil, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := mongostore.DeleteSecret(ctx, tc.username) - assert.Equal(t, tc.expected, err) - - err = mongotest.DropDatabase() - assert.NoError(t, err) - }) - } -} diff --git a/api/store/mongo/migrations.go b/api/store/mongo/migrations.go deleted file mode 100644 index 769daf07cd3..00000000000 --- a/api/store/mongo/migrations.go +++ /dev/null @@ -1,104 +0,0 @@ -package mongo - -import ( - "context" - - "github.com/pkg/errors" - "github.com/shellhub-io/shellhub/api/store/mongo/migrations" - "github.com/sirupsen/logrus" - lock "github.com/square/mongo-lock" - migrate "github.com/xakep666/mongo-migrate" - "go.mongodb.org/mongo-driver/bson" - "go.mongodb.org/mongo-driver/mongo" - "go.mongodb.org/mongo-driver/mongo/options" - "go.mongodb.org/mongo-driver/mongo/writeconcern" -) - -func ApplyMigrations(db *mongo.Database) error { - logrus.Info("Creating lock for the resource migrations") - - lockClient := lock.NewClient(db.Collection("locks", options.Collection().SetWriteConcern(writeconcern.Majority()))) - if err := lockClient.CreateIndexes(context.TODO()); err != nil { - logrus.WithError(err).Fatal("Failed to create a lock for 
the database") - } - - logrus.Info("Locking the resource migrations") - - lockID := "0" - - if err := lockClient.XLock(context.TODO(), "migrations", lockID, lock.LockDetails{}); err != nil { - logrus.WithError(err).Fatal("Failed to lock the migrations") - } - - defer func() { - logrus.Info("Unlocking the resource migrations") - - if _, err := lockClient.Unlock(context.TODO(), lockID); err != nil { - logrus.WithError(err).Fatal("Failed to unlock the migrations") - } - }() - - if err := fixMigrations072(db); err != nil { - logrus.WithError(err).Fatal("Failed to fix the migrations lock bug") - } - - list := migrations.GenerateMigrations() - migration := migrate.NewMigrate(db, list...) - - current, _, err := migration.Version() - if err != nil { - logrus.WithError(err).Fatal("Failed to get current migration version") - } - - latest := list[len(list)-1] - - if current == latest.Version { - logrus.Info("No migrations to apply") - - return nil - } - - logrus.WithFields(logrus.Fields{ - "from": current, - "to": latest.Version, - }).Info("Migrating database") - - return migration.Up(migrate.AllAvailable) -} - -// This function is necessary due the lock bug on v0.7.2. -func fixMigrations072(db *mongo.Database) error { - // Search for lock in migrations collection. - if _, err := db.Collection("migrations").Find(context.TODO(), - bson.M{"resource": "migrations"}, - ); err != nil && err == mongo.ErrNoDocuments { - // No documents found, nothing to do. - return nil - } else if err != nil { - return errors.Wrap(err, "Failed to find a lock for the migrations") - } - - // Creates a temporary collection containing unique migration documents. 
- if _, err := db.Collection("migrations").Aggregate(context.TODO(), []bson.M{ - {"$match": bson.M{"version": bson.M{"$ne": nil}}}, - {"$sort": bson.M{"_id": 1}}, - {"$group": bson.M{"_id": "$version", "doc": bson.M{"$first": "$$ROOT"}}}, - {"$replaceRoot": bson.M{"newRoot": "$doc"}}, - {"$out": "migrations_tmp"}, - }); err != nil { - return errors.Wrap(err, "Failed to create a temporary collection") - } - - // Cleanup migrations collection. - if _, err := db.Collection("migrations").DeleteMany(context.TODO(), bson.M{}); err != nil { - return errors.Wrap(err, "Failed to cleanup the migrations collection") - } - - // Copy documents from temporary collection to migrations collection. - if _, err := db.Collection("migrations_tmp").Aggregate(context.TODO(), []bson.M{{"$out": "migrations"}}); err != nil { - return errors.Wrap(err, "Failed to copy the documents to a new migration collection") - } - - // Drop temporary collection. - return db.Collection("migrations_tmp").Drop(context.TODO()) -} diff --git a/api/store/mongo/migrations/init_test.go b/api/store/mongo/migrations/init_test.go deleted file mode 100644 index 48c6d0df61c..00000000000 --- a/api/store/mongo/migrations/init_test.go +++ /dev/null @@ -1,13 +0,0 @@ -package migrations - -import ( - "os" - "testing" -) - -func TestMain(m *testing.M) { - os.Setenv("SHELLHUB_ENTERPRISE", "true") - os.Setenv("SHELLHUB_CLOUD", "true") - code := m.Run() - os.Exit(code) -} diff --git a/api/store/mongo/migrations/main.go b/api/store/mongo/migrations/main.go index 47a4d74ef8e..cd40a56e77d 100644 --- a/api/store/mongo/migrations/main.go +++ b/api/store/mongo/migrations/main.go @@ -73,6 +73,62 @@ func GenerateMigrations() []migrate.Migration { migration61, migration62, migration63, + migration64, + migration65, + migration66, + migration67, + migration68, + migration69, + migration70, + migration71, + migration72, + migration73, + migration74, + migration75, + migration76, + migration77, + migration78, + migration79, + 
migration80, + migration81, + migration82, + migration83, + migration84, + migration85, + migration86, + migration87, + migration88, + migration89, + migration90, + migration91, + migration92, + migration93, + migration94, + migration95, + migration96, + migration97, + migration98, + migration99, + migration100, + migration101, + migration102, + migration103, + migration104, + migration105, + migration106, + migration107, + migration108, + migration109, + migration110, + migration111, + migration112, + migration113, + migration114, + migration115, + migration116, + migration117, + migration118, + migration119, } } diff --git a/api/store/mongo/migrations/main_test.go b/api/store/mongo/migrations/main_test.go new file mode 100644 index 00000000000..e352a8d1c45 --- /dev/null +++ b/api/store/mongo/migrations/main_test.go @@ -0,0 +1,58 @@ +package migrations + +import ( + "context" + "os" + "testing" + + "github.com/shellhub-io/shellhub/api/pkg/dbtest" + log "github.com/sirupsen/logrus" + "go.mongodb.org/mongo-driver/mongo" + mongooptions "go.mongodb.org/mongo-driver/mongo/options" +) + +var ( + srv = &dbtest.Server{} + c *mongo.Client +) + +func TestMain(m *testing.M) { + os.Setenv("SHELLHUB_ENTERPRISE", "true") + os.Setenv("SHELLHUB_CLOUD", "true") + + log.Info("Starting migration tests") + + ctx := context.Background() + + srv.Container.Database = "test" + + if err := srv.Up(ctx); err != nil { + log.WithError(err).Error("Failed to UP the mongodb container") + os.Exit(1) + } + + log.Info("Connecting to ", srv.Container.ConnectionString) + + var err error + + c, err = mongo.Connect(ctx, mongooptions.Client().ApplyURI(srv.Container.ConnectionString+"/"+srv.Container.Database)) + if err != nil { + log.WithError(err).Error("Unable to connect to MongoDB") + os.Exit(1) + } + + if err := c.Ping(ctx, nil); err != nil { + log.WithError(err).Error("Unable to ping MongoDB") + os.Exit(1) + } + + code := m.Run() + + log.Info("Stopping migration tests") + if err := srv.Down(ctx); 
err != nil { + log.WithError(err).Error("Failed to DOWN the mongodb container") + os.Exit(1) + } + + os.Exit(code) +} diff --git a/api/store/mongo/migrations/migration_1.go b/api/store/mongo/migrations/migration_1.go index 4ad61db2c28..10551820960 100644 --- a/api/store/mongo/migrations/migration_1.go +++ b/api/store/mongo/migrations/migration_1.go @@ -1,6 +1,8 @@ package migrations import ( + "context" + "github.com/sirupsen/logrus" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/mongo" @@ -9,7 +11,7 @@ import ( var migration1 = migrate.Migration{ Version: 1, Description: "Create the database for the system", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 1, @@ -17,8 +19,8 @@ var migration1 = migrate.Migration{ }).Info("Applying migration") return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 1, @@ -26,5 +28,5 @@ var migration1 = migrate.Migration{ }).Info("Applying migration") return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_10.go b/api/store/mongo/migrations/migration_10.go index f943eeb2c6d..3cbfc77d696 100644 --- a/api/store/mongo/migrations/migration_10.go +++ b/api/store/mongo/migrations/migration_10.go @@ -13,7 +13,7 @@ import ( var migration10 = migrate.Migration{ Version: 10, Description: "Unset unique on session_record in the users collection", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 10, @@ -23,14 +23,14 @@ var migration10 = migrate.Migration{ Keys: bson.D{{"session_record", 1}}, Options: options.Index().SetName("session_record").SetUnique(false), } - if _, err 
:= db.Collection("users").Indexes().CreateOne(context.TODO(), mod); err != nil { + if _, err := db.Collection("users").Indexes().CreateOne(ctx, mod); err != nil { return err } - _, err := db.Collection("users").UpdateMany(context.TODO(), bson.M{}, bson.M{"$set": bson.M{"session_record": true}}) + _, err := db.Collection("users").UpdateMany(ctx, bson.M{}, bson.M{"$set": bson.M{"session_record": true}}) return err - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 10, @@ -38,5 +38,5 @@ var migration10 = migrate.Migration{ }).Info("Applying migration") return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_100.go b/api/store/mongo/migrations/migration_100.go new file mode 100644 index 00000000000..d8567c145fd --- /dev/null +++ b/api/store/mongo/migrations/migration_100.go @@ -0,0 +1,138 @@ +package migrations + +import ( + "context" + + "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration100 = migrate.Migration{ + Version: 100, + Description: "Remove direct-tcpip events", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 100, + "action": "Up", + }).Info("Applying migration: 100") + + pipeline := mongo.Pipeline{ + {{Key: "$match", Value: bson.M{"type": "direct-tcpip"}}}, + {{Key: "$group", Value: bson.M{ + "_id": "$session", + "seats": bson.M{"$addToSet": "$seat"}, + }}}, + } + + cursor, err := db.Collection("sessions_events").Aggregate(ctx, pipeline) + if err != nil { + logrus.WithError(err).Error("Failed to aggregate direct-tcpip seats") + + return err + } + + defer cursor.Close(ctx) + + for cursor.Next(ctx) { + var result struct { + ID string `bson:"_id"` + Seats []int 
`bson:"seats"` + } + + if err := cursor.Decode(&result); err != nil { + logrus.WithError(err).Error("Failed to decode aggregation result") + + return err + } + + _, err := db.Collection("sessions").UpdateOne( + ctx, + bson.M{"uid": result.ID}, + bson.M{"$pullAll": bson.M{ + "events.seats": result.Seats, + }}, + ) + if err != nil { + logrus.WithError(err).WithField("session_uid", result.ID).Error("Failed to remove seats from session") + + return err + } + } + + if err := cursor.Err(); err != nil { + logrus.WithError(err).Error("Cursor error during seat removal") + + return err + } + + _, err = db.Collection("sessions").UpdateMany( + ctx, + bson.M{"events.types": "direct-tcpip"}, + bson.M{"$pull": bson.M{ + "events.types": "direct-tcpip", + }}, + ) + if err != nil { + logrus.WithError(err).Error("Failed to remove direct-tcpip from events.types in sessions") + + return err + } + + _, err = db.Collection("sessions_events").DeleteMany( + ctx, + bson.M{"type": "direct-tcpip"}, + ) + if err != nil { + logrus.WithError(err).Error("Failed to remove direct-tcpip events from sessions_events") + + return err + } + + _, err = db.Collection("sessions").UpdateMany( + ctx, + bson.M{ + "recorded": true, + "events.types": bson.M{"$size": 0}, + }, + bson.M{"$set": bson.M{"recorded": false}}, + ) + if err != nil { + logrus.WithError(err).Error("Failed to update recorded flag for sessions with empty events.types") + + return err + } + + return nil + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 100, + "action": "Down", + }).Info("Reverting migration: 100") + + _, err := db.Collection("sessions").UpdateMany( + ctx, + bson.M{ + "recorded": false, + "events.types": bson.M{"$size": 0}, + }, + bson.M{"$set": bson.M{"recorded": true}}, + ) + if err != nil { + logrus.WithError(err).Error("Failed to revert recorded flag changes") + + return err + } + + 
logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 100, + }).Warn("Cannot restore deleted direct-tcpip events and seats - data loss is permanent") + + return nil + }), +} diff --git a/api/store/mongo/migrations/migration_100_test.go b/api/store/mongo/migrations/migration_100_test.go new file mode 100644 index 00000000000..f5ee012fe3c --- /dev/null +++ b/api/store/mongo/migrations/migration_100_test.go @@ -0,0 +1,369 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +func TestMigration100Up(t *testing.T) { + ctx := context.Background() + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "remove direct-tcpip seats and types from session", + setup: func() error { + sessionUID := "test-session-1" + _, err := c.Database("test").Collection("sessions").InsertOne(ctx, bson.M{ + "_id": primitive.NewObjectID(), + "uid": sessionUID, + "recorded": true, + "events": bson.M{ + "seats": []int{0, 1, 2, 3, 4, 5}, + "types": []string{"pty-req", "direct-tcpip", "shell", "direct-tcpip", "exit-status"}, + }, + }) + if err != nil { + return err + } + + events := []any{ + bson.M{"_id": primitive.NewObjectID(), "session": sessionUID, "type": "pty-req", "seat": 0}, + bson.M{"_id": primitive.NewObjectID(), "session": sessionUID, "type": "direct-tcpip", "seat": 1}, + bson.M{"_id": primitive.NewObjectID(), "session": sessionUID, "type": "shell", "seat": 2}, + bson.M{"_id": primitive.NewObjectID(), "session": sessionUID, "type": "direct-tcpip", "seat": 3}, + bson.M{"_id": primitive.NewObjectID(), "session": sessionUID, "type": "exit-status", "seat": 4}, + bson.M{"_id": primitive.NewObjectID(), "session": sessionUID, "type": "direct-tcpip", "seat": 5}, + } + _, err = 
c.Database("test").Collection("sessions_events").InsertMany(ctx, events) + + return err + }, + verify: func(tt *testing.T) { + session := make(map[string]any) + require.NoError(tt, c.Database("test").Collection("sessions").FindOne(ctx, bson.M{"uid": "test-session-1"}).Decode(&session)) + + events, ok := session["events"] + require.True(tt, ok) + eventsMap := events.(map[string]any) + + seats, ok := eventsMap["seats"] + require.True(tt, ok) + seatsList := seats.(bson.A) + require.Len(tt, seatsList, 3) + require.Contains(tt, seatsList, int32(0)) + require.Contains(tt, seatsList, int32(2)) + require.Contains(tt, seatsList, int32(4)) + require.NotContains(tt, seatsList, int32(1)) + require.NotContains(tt, seatsList, int32(3)) + require.NotContains(tt, seatsList, int32(5)) + + types, ok := eventsMap["types"] + require.True(tt, ok) + typesList := types.(bson.A) + require.Len(tt, typesList, 3) + require.Contains(tt, typesList, "pty-req") + require.Contains(tt, typesList, "shell") + require.Contains(tt, typesList, "exit-status") + require.NotContains(tt, typesList, "direct-tcpip") + }, + }, + { + description: "remove all direct-tcpip events from sessions_events collection", + setup: func() error { + events := []any{ + bson.M{"_id": primitive.NewObjectID(), "session": "session1", "type": "direct-tcpip", "seat": 1}, + bson.M{"_id": primitive.NewObjectID(), "session": "session1", "type": "shell", "seat": 2}, + bson.M{"_id": primitive.NewObjectID(), "session": "session2", "type": "direct-tcpip", "seat": 0}, + } + _, err := c.Database("test").Collection("sessions_events").InsertMany(ctx, events) + + return err + }, + verify: func(tt *testing.T) { + count, err := c.Database("test").Collection("sessions_events").CountDocuments(ctx, bson.M{"type": "direct-tcpip"}) + require.NoError(tt, err) + require.Equal(tt, int64(0), count) + + count, err = c.Database("test").Collection("sessions_events").CountDocuments(ctx, bson.M{"type": "shell"}) + require.NoError(tt, err) + 
require.Equal(tt, int64(1), count) + }, + }, + { + description: "mark recorded=false for sessions with empty events.types after removal", + setup: func() error { + sessionUID := "test-session-2" + _, err := c.Database("test").Collection("sessions").InsertOne(ctx, bson.M{ + "_id": primitive.NewObjectID(), + "uid": sessionUID, + "recorded": true, + "events": bson.M{ + "seats": []int{0, 1}, + "types": []string{"direct-tcpip", "direct-tcpip"}, + }, + }) + if err != nil { + return err + } + + events := []any{ + bson.M{"_id": primitive.NewObjectID(), "session": sessionUID, "type": "direct-tcpip", "seat": 0}, + bson.M{"_id": primitive.NewObjectID(), "session": sessionUID, "type": "direct-tcpip", "seat": 1}, + } + _, err = c.Database("test").Collection("sessions_events").InsertMany(ctx, events) + + return err + }, + verify: func(tt *testing.T) { + session := make(map[string]any) + require.NoError(tt, c.Database("test").Collection("sessions").FindOne(ctx, bson.M{"uid": "test-session-2"}).Decode(&session)) + + recorded, ok := session["recorded"] + require.True(tt, ok) + require.Equal(tt, false, recorded) + + events, ok := session["events"] + require.True(tt, ok) + eventsMap := events.(map[string]any) + + types, ok := eventsMap["types"] + require.True(tt, ok) + typesList := types.(bson.A) + require.Len(tt, typesList, 0) + + seats, ok := eventsMap["seats"] + require.True(tt, ok) + seatsList := seats.(bson.A) + require.Len(tt, seatsList, 0) + }, + }, + { + description: "session with no direct-tcpip events remains unchanged", + setup: func() error { + sessionUID := "test-session-3" + _, err := c.Database("test").Collection("sessions").InsertOne(ctx, bson.M{ + "_id": primitive.NewObjectID(), + "uid": sessionUID, + "recorded": true, + "events": bson.M{ + "seats": []int{0, 1, 2}, + "types": []string{"pty-req", "shell", "exit-status"}, + }, + }) + if err != nil { + return err + } + + events := []any{ + bson.M{"_id": primitive.NewObjectID(), "session": sessionUID, "type": "pty-req", 
"seat": 0}, + bson.M{"_id": primitive.NewObjectID(), "session": sessionUID, "type": "shell", "seat": 1}, + bson.M{"_id": primitive.NewObjectID(), "session": sessionUID, "type": "exit-status", "seat": 2}, + } + _, err = c.Database("test").Collection("sessions_events").InsertMany(ctx, events) + + return err + }, + verify: func(tt *testing.T) { + session := make(map[string]any) + require.NoError(tt, c.Database("test").Collection("sessions").FindOne(ctx, bson.M{"uid": "test-session-3"}).Decode(&session)) + + recorded, ok := session["recorded"] + require.True(tt, ok) + require.Equal(tt, true, recorded) + + events, ok := session["events"] + require.True(tt, ok) + eventsMap := events.(map[string]any) + + seats, ok := eventsMap["seats"] + require.True(tt, ok) + seatsList := seats.(bson.A) + require.Len(tt, seatsList, 3) + require.Contains(tt, seatsList, int32(0)) + require.Contains(tt, seatsList, int32(1)) + require.Contains(tt, seatsList, int32(2)) + + types, ok := eventsMap["types"] + require.True(tt, ok) + typesList := types.(bson.A) + require.Len(tt, typesList, 3) + require.Contains(tt, typesList, "pty-req") + require.Contains(tt, typesList, "shell") + require.Contains(tt, typesList, "exit-status") + }, + }, + { + description: "partial removal - some seats have direct-tcpip, others don't", + setup: func() error { + sessionUID := "test-session-4" + _, err := c.Database("test").Collection("sessions").InsertOne(ctx, bson.M{ + "_id": primitive.NewObjectID(), + "uid": sessionUID, + "recorded": true, + "events": bson.M{ + "seats": []int{0, 1, 2, 3, 4}, + "types": []string{"pty-req", "direct-tcpip", "shell", "exit-status"}, + }, + }) + if err != nil { + return err + } + + events := []any{ + bson.M{"_id": primitive.NewObjectID(), "session": sessionUID, "type": "pty-req", "seat": 0}, + bson.M{"_id": primitive.NewObjectID(), "session": sessionUID, "type": "direct-tcpip", "seat": 1}, + bson.M{"_id": primitive.NewObjectID(), "session": sessionUID, "type": "shell", "seat": 2}, + 
bson.M{"_id": primitive.NewObjectID(), "session": sessionUID, "type": "exit-status", "seat": 3}, + bson.M{"_id": primitive.NewObjectID(), "session": sessionUID, "type": "direct-tcpip", "seat": 4}, + } + _, err = c.Database("test").Collection("sessions_events").InsertMany(ctx, events) + + return err + }, + verify: func(tt *testing.T) { + session := make(map[string]any) + require.NoError(tt, c.Database("test").Collection("sessions").FindOne(ctx, bson.M{"uid": "test-session-4"}).Decode(&session)) + + recorded, ok := session["recorded"] + require.True(tt, ok) + require.Equal(tt, true, recorded) + + events, ok := session["events"] + require.True(tt, ok) + eventsMap := events.(map[string]any) + + seats, ok := eventsMap["seats"] + require.True(tt, ok) + seatsList := seats.(bson.A) + require.Len(tt, seatsList, 3) + require.Contains(tt, seatsList, int32(0)) + require.Contains(tt, seatsList, int32(2)) + require.Contains(tt, seatsList, int32(3)) + require.NotContains(tt, seatsList, int32(1)) + require.NotContains(tt, seatsList, int32(4)) + + types, ok := eventsMap["types"] + require.True(tt, ok) + typesList := types.(bson.A) + require.Len(tt, typesList, 3) + require.Contains(tt, typesList, "pty-req") + require.Contains(tt, typesList, "shell") + require.Contains(tt, typesList, "exit-status") + require.NotContains(tt, typesList, "direct-tcpip") + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + require.NoError(tt, tc.setup()) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[99]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + tc.verify(tt) + }) + } +} + +func TestMigration100Down(t *testing.T) { + ctx := context.Background() + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "revert recorded=true for sessions with empty events.types", + setup: func() error { + sessionUID 
:= "test-session-1" + _, err := c.Database("test").Collection("sessions").InsertOne(ctx, bson.M{ + "_id": primitive.NewObjectID(), + "uid": sessionUID, + "recorded": true, + "events": bson.M{ + "seats": []int{0, 1}, + "types": []string{"direct-tcpip"}, + }, + }) + if err != nil { + return err + } + + events := []any{ + bson.M{"_id": primitive.NewObjectID(), "session": sessionUID, "type": "direct-tcpip", "seat": 0}, + bson.M{"_id": primitive.NewObjectID(), "session": sessionUID, "type": "direct-tcpip", "seat": 1}, + } + _, err = c.Database("test").Collection("sessions_events").InsertMany(ctx, events) + + return err + }, + verify: func(tt *testing.T) { + session := make(map[string]any) + require.NoError(tt, c.Database("test").Collection("sessions").FindOne(ctx, bson.M{"uid": "test-session-1"}).Decode(&session)) + + recorded, ok := session["recorded"] + require.True(tt, ok) + require.Equal(tt, true, recorded) + + events, ok := session["events"] + require.True(tt, ok) + eventsMap := events.(map[string]any) + + types, ok := eventsMap["types"] + require.True(tt, ok) + typesList := types.(bson.A) + require.Len(tt, typesList, 0) + + seats, ok := eventsMap["seats"] + require.True(tt, ok) + seatsList := seats.(bson.A) + require.Len(tt, seatsList, 0) + }, + }, + { + description: "verify direct-tcpip events cannot be restored in sessions_events", + setup: func() error { + events := []any{ + bson.M{"_id": primitive.NewObjectID(), "session": "session1", "type": "direct-tcpip", "seat": 1}, + bson.M{"_id": primitive.NewObjectID(), "session": "session1", "type": "shell", "seat": 2}, + } + _, err := c.Database("test").Collection("sessions_events").InsertMany(ctx, events) + + return err + }, + verify: func(tt *testing.T) { + count, err := c.Database("test").Collection("sessions_events").CountDocuments(ctx, bson.M{"type": "direct-tcpip"}) + require.NoError(tt, err) + require.Equal(tt, int64(0), count) + + count, err = 
c.Database("test").Collection("sessions_events").CountDocuments(ctx, bson.M{"type": "shell"}) + require.NoError(tt, err) + require.Equal(tt, int64(1), count) + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + require.NoError(tt, tc.setup()) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[99]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + require.NoError(tt, migrates.Down(ctx, migrate.AllAvailable)) + tc.verify(tt) + }) + } +} diff --git a/api/store/mongo/migrations/migration_101.go b/api/store/mongo/migrations/migration_101.go new file mode 100644 index 00000000000..276bd731d4b --- /dev/null +++ b/api/store/mongo/migrations/migration_101.go @@ -0,0 +1,136 @@ +package migrations + +import ( + "context" + + log "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration101 = migrate.Migration{ + Version: 101, + Description: "Add device count fields to namespaces based on existing devices", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{"component": "migration", "version": 101, "action": "Up"}).Info("Applying migration") + + initDoc := bson.M{ + "$set": bson.M{ + "devices_accepted_count": 0, + "devices_pending_count": 0, + "devices_rejected_count": 0, + }, + } + + if _, err := db.Collection("namespaces").UpdateMany(ctx, bson.M{}, initDoc); err != nil { + log.WithError(err).Error("Failed to initialize namespace device counts") + + return err + } + + pipeline := []bson.M{ + { + "$group": bson.M{ + "_id": bson.M{ + "tenant_id": "$tenant_id", + "status": "$status", + }, + "count": bson.M{"$sum": 1}, + }, + }, + { + "$group": bson.M{ + "_id": "$_id.tenant_id", + "counts": bson.M{ + "$push": bson.M{ + "status": "$_id.status", + "count": "$count", + }, + }, + 
}, + }, + } + + cursor, err := db.Collection("devices").Aggregate(ctx, pipeline) + if err != nil { + log.WithError(err).Error("Failed to aggregate device counts") + + return err + } + defer cursor.Close(ctx) + + for cursor.Next(ctx) { + var result struct { + ID string `bson:"_id"` + Counts []struct { + Status string `bson:"status"` + Count int `bson:"count"` + } `bson:"counts"` + } + + if err := cursor.Decode(&result); err != nil { + log.WithError(err).Error("Failed to decode aggregation result") + + continue + } + + updateDoc := bson.M{ + "$set": bson.M{ + "devices_accepted_count": 0, + "devices_pending_count": 0, + "devices_rejected_count": 0, + }, + } + + for _, count := range result.Counts { + switch count.Status { + case "accepted": + updateDoc["$set"].(bson.M)["devices_accepted_count"] = count.Count + case "pending": + updateDoc["$set"].(bson.M)["devices_pending_count"] = count.Count + case "rejected": + updateDoc["$set"].(bson.M)["devices_rejected_count"] = count.Count + } + } + + if _, err := db.Collection("namespaces").UpdateOne(ctx, bson.M{"tenant_id": result.ID}, updateDoc); err != nil { + log.WithFields(log.Fields{"tenant_id": result.ID, "error": err}).Error("Failed to update namespace device counts") + + continue + } + + log.WithFields(log.Fields{"tenant_id": result.ID, "counts": result.Counts}).Info("Updated namespace device counts") + } + + if err := cursor.Err(); err != nil { + log.WithError(err).Error("Cursor error during migration") + + return err + } + + return nil + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{"component": "migration", "version": 101, "action": "Down"}).Info("Reverting migration") + + updateDoc := bson.M{ + "$unset": bson.M{ + "devices_accepted_count": "", + "devices_pending_count": "", + "devices_rejected_count": "", + }, + } + + result, err := db.Collection("namespaces").UpdateMany(ctx, bson.M{}, updateDoc) + if err != nil { + 
log.WithError(err).Error("Failed to remove device count fields") + + return err + } + + log.WithFields(log.Fields{"modified_count": result.ModifiedCount}).Info("Removed device count fields from namespaces") + + return nil + }), +} diff --git a/api/store/mongo/migrations/migration_101_test.go b/api/store/mongo/migrations/migration_101_test.go new file mode 100644 index 00000000000..bd04a1abeba --- /dev/null +++ b/api/store/mongo/migrations/migration_101_test.go @@ -0,0 +1,244 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration101Up(t *testing.T) { + ctx := context.Background() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "succeeds with devices and namespaces", + setup: func() error { + namespaces := []bson.M{ + { + "tenant_id": "tenant-1", + "name": "namespace-1", + }, + { + "tenant_id": "tenant-2", + "name": "namespace-2", + }, + { + "tenant_id": "tenant-3", + "name": "namespace-3", + }, + } + + _, err := c.Database("test").Collection("namespaces").InsertMany(ctx, []any{namespaces[0], namespaces[1], namespaces[2]}) + if err != nil { + return err + } + + devices := []bson.M{ + // tenant-1: 3 accepted, 2 pending, 1 rejected + {"tenant_id": "tenant-1", "uid": "dev-1", "status": "accepted"}, + {"tenant_id": "tenant-1", "uid": "dev-2", "status": "accepted"}, + {"tenant_id": "tenant-1", "uid": "dev-3", "status": "accepted"}, + {"tenant_id": "tenant-1", "uid": "dev-4", "status": "pending"}, + {"tenant_id": "tenant-1", "uid": "dev-5", "status": "pending"}, + {"tenant_id": "tenant-1", "uid": "dev-6", "status": "rejected"}, + + // tenant-2: 1 accepted, 0 pending, 2 rejected + {"tenant_id": "tenant-2", "uid": "dev-7", "status": "accepted"}, + {"tenant_id": "tenant-2", "uid": "dev-8", "status": "rejected"}, + 
{"tenant_id": "tenant-2", "uid": "dev-9", "status": "rejected"}, + + // tenant-3: no devices (test empty case) + } + + _, err = c. + Database("test"). + Collection("devices"). + InsertMany(ctx, []any{devices[0], devices[1], devices[2], devices[3], devices[4], devices[5], devices[6], devices[7], devices[8]}) + + return err + }, + verify: func(tt *testing.T) { + namespace1 := make(map[string]any) + require.NoError(tt, c.Database("test").Collection("namespaces").FindOne(ctx, bson.M{"tenant_id": "tenant-1"}).Decode(&namespace1)) + assert.Equal(tt, int32(3), namespace1["devices_accepted_count"].(int32)) + assert.Equal(tt, int32(2), namespace1["devices_pending_count"].(int32)) + assert.Equal(tt, int32(1), namespace1["devices_rejected_count"].(int32)) + + namespace2 := make(map[string]any) + require.NoError(tt, c.Database("test").Collection("namespaces").FindOne(ctx, bson.M{"tenant_id": "tenant-2"}).Decode(&namespace2)) + assert.Equal(tt, int32(1), namespace2["devices_accepted_count"].(int32)) + assert.Equal(tt, int32(0), namespace2["devices_pending_count"].(int32)) + assert.Equal(tt, int32(2), namespace2["devices_rejected_count"].(int32)) + + namespace3 := make(map[string]any) + require.NoError(tt, c.Database("test").Collection("namespaces").FindOne(ctx, bson.M{"tenant_id": "tenant-3"}).Decode(&namespace3)) + assert.Equal(tt, int32(0), namespace3["devices_accepted_count"]) + assert.Equal(tt, int32(0), namespace3["devices_pending_count"]) + assert.Equal(tt, int32(0), namespace3["devices_rejected_count"]) + }, + }, + { + description: "succeeds with namespaces but no devices", + setup: func() error { + namespace := bson.M{"tenant_id": "tenant-empty", "name": "empty-namespace"} + _, err := c.Database("test").Collection("namespaces").InsertOne(ctx, namespace) + + return err + }, + verify: func(tt *testing.T) { + namespace := make(map[string]any) + require.NoError(tt, c.Database("test").Collection("namespaces").FindOne(ctx, bson.M{"tenant_id": 
"tenant-empty"}).Decode(&namespace)) + assert.Equal(tt, int32(0), namespace["devices_accepted_count"]) + assert.Equal(tt, int32(0), namespace["devices_pending_count"]) + assert.Equal(tt, int32(0), namespace["devices_rejected_count"]) + }, + }, + { + description: "succeeds with single status devices", + setup: func() error { + namespace := bson.M{ + "tenant_id": "tenant-single", + "name": "single-status-namespace", + } + _, err := c.Database("test").Collection("namespaces").InsertOne(ctx, namespace) + if err != nil { + return err + } + + devices := []bson.M{ + {"tenant_id": "tenant-single", "uid": "dev-a", "status": "pending"}, + {"tenant_id": "tenant-single", "uid": "dev-b", "status": "pending"}, + {"tenant_id": "tenant-single", "uid": "dev-c", "status": "pending"}, + } + + _, err = c.Database("test").Collection("devices").InsertMany(ctx, []any{devices[0], devices[1], devices[2]}) + + return err + }, + verify: func(tt *testing.T) { + namespace := make(map[string]any) + require.NoError(tt, c.Database("test").Collection("namespaces").FindOne(ctx, bson.M{"tenant_id": "tenant-single"}).Decode(&namespace)) + assert.Equal(tt, int32(0), namespace["devices_accepted_count"]) + assert.Equal(tt, int32(3), namespace["devices_pending_count"]) + assert.Equal(tt, int32(0), namespace["devices_rejected_count"]) + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { assert.NoError(tt, srv.Reset()) }) + require.NoError(tt, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[100]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + tc.verify(tt) + }) + } +} + +func TestMigration101Down(t *testing.T) { + ctx := context.Background() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "succeeds removing device count fields", + setup: func() error { + _, err := c. + Database("test"). + Collection("namespaces"). 
+ InsertOne( + ctx, + bson.M{ + "tenant_id": "tenant-down", + "name": "test-namespace", + "devices_accepted_count": 15, + "devices_pending_count": 3, + "devices_rejected_count": 2, + "other_field": "should_remain", + }, + ) + + return err + }, + verify: func(tt *testing.T) { + namespace := make(map[string]any) + require.NoError(tt, c.Database("test").Collection("namespaces").FindOne(ctx, bson.M{"tenant_id": "tenant-down"}).Decode(&namespace)) + + _, hasAccepted := namespace["devices_accepted_count"] + _, hasPending := namespace["devices_pending_count"] + _, hasRejected := namespace["devices_rejected_count"] + + assert.False(tt, hasAccepted) + assert.False(tt, hasPending) + assert.False(tt, hasRejected) + assert.Equal(tt, "test-namespace", namespace["name"]) + assert.Equal(tt, "should_remain", namespace["other_field"]) + }, + }, + { + description: "succeeds with multiple namespaces", + setup: func() error { + namespaces := []bson.M{ + { + "tenant_id": "tenant-1", + "name": "namespace-1", + "devices_accepted_count": 10, + "devices_pending_count": 5, + }, + { + "tenant_id": "tenant-2", + "name": "namespace-2", + "devices_rejected_count": 2, + }, + } + _, err := c.Database("test").Collection("namespaces").InsertMany(ctx, []any{namespaces[0], namespaces[1]}) + + return err + }, + verify: func(tt *testing.T) { + namespace1 := make(map[string]any) + require.NoError(tt, c.Database("test").Collection("namespaces").FindOne(ctx, bson.M{"tenant_id": "tenant-1"}).Decode(&namespace1)) + + _, hasFields1 := namespace1["devices_accepted_count"] + assert.False(tt, hasFields1) + assert.Equal(tt, "namespace-1", namespace1["name"]) + + namespace2 := make(map[string]any) + require.NoError(tt, c.Database("test").Collection("namespaces").FindOne(ctx, bson.M{"tenant_id": "tenant-2"}).Decode(&namespace2)) + + _, hasFields2 := namespace2["devices_rejected_count"] + assert.False(tt, hasFields2) + assert.Equal(tt, "namespace-2", namespace2["name"]) + }, + }, + } + + for _, tc := range cases { 
+ t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + + require.NoError(tt, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[100]) // index 100 for migration 101 + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + require.NoError(tt, migrates.Down(ctx, migrate.AllAvailable)) + + tc.verify(tt) + }) + } +} diff --git a/api/store/mongo/migrations/migration_102.go b/api/store/mongo/migrations/migration_102.go new file mode 100644 index 00000000000..4bacfdcceb2 --- /dev/null +++ b/api/store/mongo/migrations/migration_102.go @@ -0,0 +1,34 @@ +package migrations + +import ( + "context" + + log "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration102 = migrate.Migration{ + Version: 102, + Description: "Remove legacy devices_count field from namespaces in favor of status-specific counters", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{"component": "migration", "version": 102, "action": "Up"}).Info("Applying migration") + + r, err := db.Collection("namespaces").UpdateMany(ctx, bson.M{}, bson.M{"$unset": bson.M{"devices_count": ""}}) + if err != nil { + log.WithError(err).Error("Failed to remove device count fields") + + return err + } + + log.WithFields(log.Fields{"modified_count": r.ModifiedCount}).Info("Removed device count fields from namespaces") + + return nil + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{"component": "migration", "version": 102, "action": "Down"}).Info("Cannot revert migration") + + return nil + }), +} diff --git a/api/store/mongo/migrations/migration_102_test.go b/api/store/mongo/migrations/migration_102_test.go new file mode 100644 index 00000000000..e67767512f1 --- /dev/null +++ 
b/api/store/mongo/migrations/migration_102_test.go @@ -0,0 +1,69 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration102Up(t *testing.T) { + ctx := context.Background() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "succeeds removing legacy devices_count field", + setup: func() error { + namespaces := []bson.M{ + { + "tenant_id": "tenant-1", + "devices_count": 42, + "devices_accepted_count": 42, + "devices_pending_count": 0, + "devices_rejected_count": 0, + }, + { + "tenant_id": "tenant-2", + "devices_accepted_count": 5, + }, + } + _, err := c.Database("test").Collection("namespaces").InsertMany(ctx, []any{namespaces[0], namespaces[1]}) + + return err + }, + verify: func(tt *testing.T) { + namespace1 := make(map[string]any) + require.NoError(tt, c.Database("test").Collection("namespaces").FindOne(ctx, bson.M{"tenant_id": "tenant-1"}).Decode(&namespace1)) + _, hasLegacyCount := namespace1["devices_count"] + assert.False(tt, hasLegacyCount) + assert.Equal(tt, int32(42), namespace1["devices_accepted_count"]) + assert.Equal(tt, int32(0), namespace1["devices_pending_count"]) + assert.Equal(tt, int32(0), namespace1["devices_rejected_count"]) + + namespace2 := make(map[string]any) + require.NoError(tt, c.Database("test").Collection("namespaces").FindOne(ctx, bson.M{"tenant_id": "tenant-2"}).Decode(&namespace2)) + _, hasLegacyCount3 := namespace2["devices_count"] + assert.False(tt, hasLegacyCount3) + assert.Equal(tt, int32(5), namespace2["devices_accepted_count"]) + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { assert.NoError(tt, srv.Reset()) }) + + require.NoError(tt, tc.setup()) + migrates := migrate.NewMigrate(c.Database("test"), 
GenerateMigrations()[101]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + tc.verify(tt) + }) + } +} diff --git a/api/store/mongo/migrations/migration_103.go b/api/store/mongo/migrations/migration_103.go new file mode 100644 index 00000000000..fd25013a229 --- /dev/null +++ b/api/store/mongo/migrations/migration_103.go @@ -0,0 +1,152 @@ +package migrations + +import ( + "context" + + "github.com/shellhub-io/shellhub/pkg/models" + log "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration103 = migrate.Migration{ + Version: 103, + Description: "Convert devices_removed from removed_devices collection to devices with status removed", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{"component": "migration", "version": 103, "action": "Up"}).Info("Applying migration") + + cursor, err := db.Collection("removed_devices").Find(ctx, bson.M{}) + if err != nil { + log.WithError(err).Error("Failed to find removed devices") + + return err + } + defer cursor.Close(ctx) + + processedCount := 0 + skippedCount := 0 + + for cursor.Next(ctx) { + var removedDevice struct { + Device map[string]any `bson:"device"` + Timestamp any `bson:"timestamp"` + } + + if err := cursor.Decode(&removedDevice); err != nil { + log.WithError(err).Error("Failed to decode removed device") + + continue + } + + if removedDevice.Device == nil { + log.Warn("Skipping removed device with nil device data") + skippedCount++ + + continue + } + + existingDevice := db.Collection("devices").FindOne(ctx, bson.M{"uid": removedDevice.Device["uid"]}) + if existingDevice.Err() == nil { + log.WithFields(log.Fields{"uid": removedDevice.Device["uid"]}).Info("Device already exists in devices collection, skipping") + skippedCount++ + + continue + } + + deviceDoc := removedDevice.Device + deviceDoc["status"] = 
string(models.DeviceStatusRemoved) + deviceDoc["status_updated_at"] = removedDevice.Timestamp + + if _, err := db.Collection("devices").InsertOne(ctx, deviceDoc); err != nil { + log.WithFields(log.Fields{"uid": removedDevice.Device["uid"], "error": err}).Error("Failed to insert device") + + continue + } + + processedCount++ + log.WithFields(log.Fields{"uid": removedDevice.Device["uid"]}).Info("Successfully converted removed device to devices collection") + } + + if err := cursor.Err(); err != nil { + log.WithError(err).Error("Cursor error during migration") + + return err + } + + log.WithFields(log.Fields{"processed_count": processedCount, "skipped_count": skippedCount}). + Info("Migration completed successfully") + + return nil + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{"component": "migration", "version": 103, "action": "Down"}).Info("Reverting migration") + + // NOTE: This rollback has a known limitation - we don't store the original device status + // before it was changed to "removed". This means when reverting, devices in the + // removed_devices collection will have status="removed" instead of their original + // pre-deletion status (e.g., "accepted", "pending", etc.). + // This is acceptable for rollback purposes as the main goal is data preservation. 
+ + cursor, err := db.Collection("devices").Find(ctx, bson.M{"status": "removed"}) + if err != nil { + log.WithError(err).Error("Failed to find devices with status removed") + + return err + } + defer cursor.Close(ctx) + + processedCount := 0 + skippedCount := 0 + + for cursor.Next(ctx) { + var device map[string]any + + if err := cursor.Decode(&device); err != nil { + log.WithError(err).Error("Failed to decode device") + + continue + } + + timestamp, exists := device["status_updated_at"] + if !exists { + log.WithFields(log.Fields{"uid": device["uid"]}).Warn("Device missing status_updated_at, skipping") + skippedCount++ + + continue + } + + removedDevice := bson.M{ + "device": device, + "timestamp": timestamp, + } + + if _, err := db.Collection("removed_devices").InsertOne(ctx, removedDevice); err != nil { + log.WithFields(log.Fields{"uid": device["uid"], "error": err}).Error("Failed to insert removed device") + + continue + } + + processedCount++ + log.WithFields(log.Fields{"uid": device["uid"]}).Info("Successfully reverted device to removed_devices collection") + } + + if err := cursor.Err(); err != nil { + log.WithError(err).Error("Cursor error during migration revert") + + return err + } + + result, err := db.Collection("devices").DeleteMany(ctx, bson.M{"status": "removed"}) + if err != nil { + log.WithError(err).Error("Failed to remove devices with status removed") + + return err + } + + log.WithFields(log.Fields{"processed_count": processedCount, "skipped_count": skippedCount, "deleted_count": result.DeletedCount}). 
+ Info("Migration revert completed successfully") + + return nil + }), +} diff --git a/api/store/mongo/migrations/migration_103_test.go b/api/store/mongo/migrations/migration_103_test.go new file mode 100644 index 00000000000..d90c9013504 --- /dev/null +++ b/api/store/mongo/migrations/migration_103_test.go @@ -0,0 +1,283 @@ +package migrations + +import ( + "context" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +func TestMigration103Up(t *testing.T) { + ctx := context.Background() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "succeeds converting devices_removed to devices with status removed", + setup: func() error { + timestamp := time.Now() + + removedDevices := []bson.M{ + { + "device": bson.M{ + "_id": primitive.NewObjectID(), + "uid": "device-1", + "name": "Device 1", + "tenant_id": "tenant-1", + "status": "accepted", + "created_at": timestamp, + }, + "timestamp": timestamp, + }, + { + "device": bson.M{ + "_id": primitive.NewObjectID(), + "uid": "device-2", + "name": "Device 2", + "tenant_id": "tenant-2", + "status": "pending", + "created_at": timestamp, + }, + "timestamp": timestamp, + }, + } + + existingID := primitive.NewObjectID() + existingDevice := bson.M{ + "_id": existingID, + "uid": "device-3", + "name": "Existing Device", + "tenant_id": "tenant-1", + "status": "accepted", + "created_at": timestamp, + } + + removedDeviceExisting := bson.M{"device": existingDevice, "timestamp": timestamp} + + removedDeviceNil := bson.M{"device": nil, "timestamp": timestamp} + + if _, err := c.Database("test").Collection("devices").InsertOne(ctx, existingDevice); err != nil { + return err + } + + allRemovedDevices := []any{ + removedDevices[0], + removedDevices[1], + removedDeviceExisting, + removedDeviceNil, 
+ } + + _, err := c.Database("test").Collection("removed_devices").InsertMany(ctx, allRemovedDevices) + + return err + }, + verify: func(tt *testing.T) { + var device1 map[string]any + require.NoError(tt, c.Database("test").Collection("devices").FindOne(ctx, bson.M{"uid": "device-1"}).Decode(&device1)) + assert.Equal(tt, "device-1", device1["uid"]) + assert.Equal(tt, "Device 1", device1["name"]) + assert.Equal(tt, "tenant-1", device1["tenant_id"]) + assert.Equal(tt, "removed", device1["status"]) + assert.NotNil(tt, device1["status_updated_at"]) + + var device2 map[string]any + require.NoError(tt, c.Database("test").Collection("devices").FindOne(ctx, bson.M{"uid": "device-2"}).Decode(&device2)) + assert.Equal(tt, "device-2", device2["uid"]) + assert.Equal(tt, "Device 2", device2["name"]) + assert.Equal(tt, "tenant-2", device2["tenant_id"]) + assert.Equal(tt, "removed", device2["status"]) + assert.NotNil(tt, device2["status_updated_at"]) + + var device3 map[string]any + require.NoError(tt, c.Database("test").Collection("devices").FindOne(ctx, bson.M{"uid": "device-3"}).Decode(&device3)) + assert.Equal(tt, "device-3", device3["uid"]) + assert.Equal(tt, "Existing Device", device3["name"]) + assert.Equal(tt, "accepted", device3["status"]) + + count, err := c.Database("test").Collection("devices").CountDocuments(ctx, bson.M{}) + require.NoError(tt, err) + assert.Equal(tt, int64(3), count) + + removedCount, err := c.Database("test").Collection("devices").CountDocuments(ctx, bson.M{"status": "removed"}) + require.NoError(tt, err) + assert.Equal(tt, int64(2), removedCount) + }, + }, + { + description: "handles empty removed_devices collection", + setup: func() error { + return nil + }, + verify: func(tt *testing.T) { + count, err := c.Database("test").Collection("devices").CountDocuments(ctx, bson.M{}) + require.NoError(tt, err) + assert.Equal(tt, int64(0), count) + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() 
{ assert.NoError(tt, srv.Reset()) }) + + require.NoError(tt, tc.setup()) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[102]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + tc.verify(tt) + }) + } +} + +func TestMigration103Down(t *testing.T) { + ctx := context.Background() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "succeeds reverting devices with status removed back to removed_devices", + setup: func() error { + timestamp := time.Now() + + devices := []bson.M{ + { + "_id": primitive.NewObjectID(), + "uid": "device-1", + "name": "Device 1", + "tenant_id": "tenant-1", + "public_key": "key1", + "status": "removed", + "status_updated_at": timestamp, + "created_at": timestamp, + }, + { + "_id": primitive.NewObjectID(), + "uid": "device-2", + "name": "Device 2", + "tenant_id": "tenant-2", + "public_key": "key2", + "status": "removed", + "status_updated_at": timestamp, + "created_at": timestamp, + }, + { + "_id": primitive.NewObjectID(), + "uid": "device-3", + "name": "Device 3", + "tenant_id": "tenant-1", + "status": "accepted", + "created_at": timestamp, + }, + { + "_id": primitive.NewObjectID(), + "uid": "device-4", + "name": "Device 4", + "tenant_id": "tenant-1", + "status": "removed", + "created_at": timestamp, + }, + } + + _, err := c.Database("test").Collection("devices").InsertMany(ctx, []any{devices[0], devices[1], devices[2], devices[3]}) + + return err + }, + verify: func(tt *testing.T) { + removedCount, err := c.Database("test").Collection("devices").CountDocuments(ctx, bson.M{"status": "removed"}) + require.NoError(tt, err) + assert.Equal(tt, int64(0), removedCount) + + totalCount, err := c.Database("test").Collection("devices").CountDocuments(ctx, bson.M{}) + require.NoError(tt, err) + assert.Equal(tt, int64(1), totalCount) + + var device map[string]any + require.NoError(tt, c.Database("test").Collection("devices").FindOne(ctx, 
bson.M{"uid": "device-3"}).Decode(&device)) + assert.Equal(tt, "device-3", device["uid"]) + assert.Equal(tt, "accepted", device["status"]) + + removedDevicesCount, err := c.Database("test").Collection("removed_devices").CountDocuments(ctx, bson.M{}) + require.NoError(tt, err) + assert.Equal(tt, int64(2), removedDevicesCount) + + var removedDevice1 map[string]any + require.NoError(tt, c.Database("test").Collection("removed_devices").FindOne(ctx, bson.M{"device.uid": "device-1"}).Decode(&removedDevice1)) + + device1Data := removedDevice1["device"].(map[string]any) + assert.Equal(tt, "device-1", device1Data["uid"]) + assert.Equal(tt, "Device 1", device1Data["name"]) + assert.Equal(tt, "tenant-1", device1Data["tenant_id"]) + assert.Equal(tt, "key1", device1Data["public_key"]) + + assert.NotNil(tt, removedDevice1["timestamp"]) + + var removedDevice2 map[string]any + require.NoError(tt, c.Database("test").Collection("removed_devices").FindOne(ctx, bson.M{"device.uid": "device-2"}).Decode(&removedDevice2)) + + device2Data := removedDevice2["device"].(map[string]any) + assert.Equal(tt, "device-2", device2Data["uid"]) + assert.Equal(tt, "Device 2", device2Data["name"]) + assert.Equal(tt, "tenant-2", device2Data["tenant_id"]) + assert.Equal(tt, "key2", device2Data["public_key"]) + + assert.NotNil(tt, removedDevice2["timestamp"]) + + device4Count, err := c.Database("test").Collection("removed_devices").CountDocuments(ctx, bson.M{"device.uid": "device-4"}) + require.NoError(tt, err) + assert.Equal(tt, int64(0), device4Count) + }, + }, + { + description: "handles empty devices collection with status removed", + setup: func() error { + timestamp := time.Now() + + devices := []bson.M{ + { + "_id": primitive.NewObjectID(), + "uid": "device-1", + "name": "Device 1", + "tenant_id": "tenant-1", + "status": "accepted", + "created_at": timestamp, + }, + } + + _, err := c.Database("test").Collection("devices").InsertMany(ctx, []any{devices[0]}) + + return err + }, + verify: func(tt 
*testing.T) { + totalCount, err := c.Database("test").Collection("devices").CountDocuments(ctx, bson.M{}) + require.NoError(tt, err) + assert.Equal(tt, int64(1), totalCount) + + removedCount, err := c.Database("test").Collection("removed_devices").CountDocuments(ctx, bson.M{}) + require.NoError(tt, err) + assert.Equal(tt, int64(0), removedCount) + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { assert.NoError(tt, srv.Reset()) }) + + require.NoError(tt, tc.setup()) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[102]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + require.NoError(tt, migrates.Down(ctx, migrate.AllAvailable)) + tc.verify(tt) + }) + } +} diff --git a/api/store/mongo/migrations/migration_104.go b/api/store/mongo/migrations/migration_104.go new file mode 100644 index 00000000000..ca680a9db0a --- /dev/null +++ b/api/store/mongo/migrations/migration_104.go @@ -0,0 +1,112 @@ +package migrations + +import ( + "context" + + log "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration104 = migrate.Migration{ + Version: 104, + Description: "Add devices_removed_count field to namespaces based on existing devices with status removed", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{"component": "migration", "version": 104, "action": "Up"}).Info("Applying migration") + + initDoc := bson.M{ + "$set": bson.M{ + "devices_removed_count": 0, + }, + } + + if _, err := db.Collection("namespaces").UpdateMany(ctx, bson.M{}, initDoc); err != nil { + log.WithError(err).Error("Failed to initialize namespace devices_removed_count") + + return err + } + + pipeline := []bson.M{ + { + "$match": bson.M{ + "status": "removed", + }, + }, + { + "$group": bson.M{ + "_id": "$tenant_id", + "count": 
bson.M{"$sum": 1}, + }, + }, + } + + cursor, err := db.Collection("devices").Aggregate(ctx, pipeline) + if err != nil { + log.WithError(err).Error("Failed to aggregate removed devices count") + + return err + } + defer cursor.Close(ctx) + + updatedNamespaces := 0 + + for cursor.Next(ctx) { + var result struct { + ID string `bson:"_id"` + Count int `bson:"count"` + } + + if err := cursor.Decode(&result); err != nil { + log.WithError(err).Error("Failed to decode aggregation result") + + continue + } + + updateDoc := bson.M{ + "$set": bson.M{ + "devices_removed_count": result.Count, + }, + } + + if _, err := db.Collection("namespaces").UpdateOne(ctx, bson.M{"tenant_id": result.ID}, updateDoc); err != nil { + log.WithFields(log.Fields{"tenant_id": result.ID, "error": err}).Error("Failed to update namespace devices_removed_count") + + continue + } + + updatedNamespaces++ + log.WithFields(log.Fields{"tenant_id": result.ID, "removed_count": result.Count}).Info("Updated namespace devices_removed_count") + } + + if err := cursor.Err(); err != nil { + log.WithError(err).Error("Cursor error during migration") + + return err + } + + log.WithFields(log.Fields{"updated_namespaces": updatedNamespaces}).Info("Migration completed successfully") + + return nil + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{"component": "migration", "version": 104, "action": "Down"}).Info("Reverting migration") + + updateDoc := bson.M{ + "$unset": bson.M{ + "devices_removed_count": "", + }, + } + + result, err := db.Collection("namespaces").UpdateMany(ctx, bson.M{}, updateDoc) + if err != nil { + log.WithError(err).Error("Failed to remove devices_removed_count field") + + return err + } + + log.WithFields(log.Fields{"modified_count": result.ModifiedCount}).Info("Removed devices_removed_count field from namespaces") + + return nil + }), +} diff --git a/api/store/mongo/migrations/migration_104_test.go 
b/api/store/mongo/migrations/migration_104_test.go new file mode 100644 index 00000000000..57a967dd1a4 --- /dev/null +++ b/api/store/mongo/migrations/migration_104_test.go @@ -0,0 +1,294 @@ +package migrations + +import ( + "context" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration104Up(t *testing.T) { + ctx := context.Background() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "succeeds adding devices_removed_count field based on existing removed devices", + setup: func() error { + timestamp := time.Now() + + namespaces := []bson.M{ + { + "tenant_id": "tenant-1", + "devices_accepted_count": 5, + "devices_pending_count": 2, + "devices_rejected_count": 1, + }, + { + "tenant_id": "tenant-2", + "devices_accepted_count": 3, + "devices_pending_count": 0, + "devices_rejected_count": 0, + }, + { + "tenant_id": "tenant-3", + "devices_accepted_count": 0, + "devices_pending_count": 0, + "devices_rejected_count": 0, + }, + } + + devices := []bson.M{ + // tenant-1: 3 removed device + { + "uid": "device-1", + "name": "Device 1", + "tenant_id": "tenant-1", + "status": "removed", + "status_updated_at": timestamp, + "created_at": timestamp, + }, + { + "uid": "device-2", + "name": "Device 2", + "tenant_id": "tenant-1", + "status": "removed", + "status_updated_at": timestamp, + "created_at": timestamp, + }, + { + "uid": "device-3", + "name": "Device 3", + "tenant_id": "tenant-1", + "status": "removed", + "status_updated_at": timestamp, + "created_at": timestamp, + }, + // tenant-1: non-removed devices + { + "uid": "device-4", + "name": "Device 4", + "tenant_id": "tenant-1", + "status": "accepted", + "created_at": timestamp, + }, + { + "uid": "device-5", + "name": "Device 5", + "tenant_id": "tenant-1", + "status": "pending", + "created_at": timestamp, + 
}, + // tenant-2: 1 removed device + { + "uid": "device-6", + "name": "Device 6", + "tenant_id": "tenant-2", + "status": "removed", + "status_updated_at": timestamp, + "created_at": timestamp, + }, + // tenant-2: non-removed devices + { + "uid": "device-7", + "name": "Device 7", + "tenant_id": "tenant-2", + "status": "accepted", + "created_at": timestamp, + }, + // tenant-3: no removed devices, only accepted + { + "uid": "device-8", + "name": "Device 8", + "tenant_id": "tenant-3", + "status": "accepted", + "created_at": timestamp, + }, + } + + if _, err := c.Database("test").Collection("namespaces").InsertMany(ctx, []any{namespaces[0], namespaces[1], namespaces[2]}); err != nil { + return err + } + + rawDevices := []any{devices[0], devices[1], devices[2], devices[3], devices[4], devices[5], devices[6], devices[7]} + _, err := c.Database("test").Collection("devices").InsertMany(ctx, rawDevices) + + return err + }, + verify: func(tt *testing.T) { + namespace1 := make(map[string]any) + require.NoError(tt, c.Database("test").Collection("namespaces").FindOne(ctx, bson.M{"tenant_id": "tenant-1"}).Decode(&namespace1)) + assert.Equal(tt, int32(5), namespace1["devices_accepted_count"]) + assert.Equal(tt, int32(2), namespace1["devices_pending_count"]) + assert.Equal(tt, int32(1), namespace1["devices_rejected_count"]) + assert.Equal(tt, int32(3), namespace1["devices_removed_count"]) + + namespace2 := make(map[string]any) + require.NoError(tt, c.Database("test").Collection("namespaces").FindOne(ctx, bson.M{"tenant_id": "tenant-2"}).Decode(&namespace2)) + assert.Equal(tt, int32(3), namespace2["devices_accepted_count"]) + assert.Equal(tt, int32(0), namespace2["devices_pending_count"]) + assert.Equal(tt, int32(0), namespace2["devices_rejected_count"]) + assert.Equal(tt, int32(1), namespace2["devices_removed_count"]) + + namespace3 := make(map[string]any) + require.NoError(tt, c.Database("test").Collection("namespaces").FindOne(ctx, bson.M{"tenant_id": 
"tenant-3"}).Decode(&namespace3)) + assert.Equal(tt, int32(0), namespace3["devices_accepted_count"]) + assert.Equal(tt, int32(0), namespace3["devices_pending_count"]) + assert.Equal(tt, int32(0), namespace3["devices_rejected_count"]) + assert.Equal(tt, int32(0), namespace3["devices_removed_count"]) + }, + }, + { + description: "succeeds initializing devices_removed_count to 0 when no removed devices exist", + setup: func() error { + timestamp := time.Now() + + namespaces := []bson.M{ + { + "tenant_id": "tenant-1", + "devices_accepted_count": 2, + "devices_pending_count": 1, + "devices_rejected_count": 0, + }, + } + + // Create devices with no "removed" status + devices := []bson.M{ + { + "uid": "device-1", + "name": "Device 1", + "tenant_id": "tenant-1", + "status": "accepted", + "created_at": timestamp, + }, + { + "uid": "device-2", + "name": "Device 2", + "tenant_id": "tenant-1", + "status": "pending", + "created_at": timestamp, + }, + } + + if _, err := c.Database("test").Collection("namespaces").InsertMany(ctx, []any{namespaces[0]}); err != nil { + return err + } + + _, err := c.Database("test").Collection("devices").InsertMany(ctx, []any{devices[0], devices[1]}) + + return err + }, + verify: func(tt *testing.T) { + namespace1 := make(map[string]any) + require.NoError(tt, c.Database("test").Collection("namespaces").FindOne(ctx, bson.M{"tenant_id": "tenant-1"}).Decode(&namespace1)) + assert.Equal(tt, int32(2), namespace1["devices_accepted_count"]) + assert.Equal(tt, int32(1), namespace1["devices_pending_count"]) + assert.Equal(tt, int32(0), namespace1["devices_rejected_count"]) + assert.Equal(tt, int32(0), namespace1["devices_removed_count"]) + }, + }, + { + description: "succeeds with empty collections", + setup: func() error { + namespaces := []bson.M{ + { + "tenant_id": "tenant-1", + "devices_accepted_count": 0, + "devices_pending_count": 0, + "devices_rejected_count": 0, + }, + } + + _, err := c.Database("test").Collection("namespaces").InsertMany(ctx, 
[]any{namespaces[0]}) + + return err + }, + verify: func(tt *testing.T) { + namespace1 := make(map[string]any) + require.NoError(tt, c.Database("test").Collection("namespaces").FindOne(ctx, bson.M{"tenant_id": "tenant-1"}).Decode(&namespace1)) + assert.Equal(tt, int32(0), namespace1["devices_accepted_count"]) + assert.Equal(tt, int32(0), namespace1["devices_pending_count"]) + assert.Equal(tt, int32(0), namespace1["devices_rejected_count"]) + assert.Equal(tt, int32(0), namespace1["devices_removed_count"]) + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { assert.NoError(tt, srv.Reset()) }) + + require.NoError(tt, tc.setup()) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[103]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + tc.verify(tt) + }) + } +} + +func TestMigration104Down(t *testing.T) { + ctx := context.Background() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "succeeds removing devices_removed_count field", + setup: func() error { + namespaces := []bson.M{ + { + "tenant_id": "tenant-1", + "devices_accepted_count": 42, + "devices_pending_count": 5, + "devices_rejected_count": 3, + "devices_removed_count": 7, + }, + { + "tenant_id": "tenant-2", + "devices_accepted_count": 10, + "devices_removed_count": 2, + }, + } + _, err := c.Database("test").Collection("namespaces").InsertMany(ctx, []any{namespaces[0], namespaces[1]}) + + return err + }, + verify: func(tt *testing.T) { + namespace1 := make(map[string]any) + require.NoError(tt, c.Database("test").Collection("namespaces").FindOne(ctx, bson.M{"tenant_id": "tenant-1"}).Decode(&namespace1)) + _, hasRemovedCount := namespace1["devices_removed_count"] + assert.False(tt, hasRemovedCount) + assert.Equal(tt, int32(42), namespace1["devices_accepted_count"]) + assert.Equal(tt, int32(5), namespace1["devices_pending_count"]) + assert.Equal(tt, 
int32(3), namespace1["devices_rejected_count"]) + + namespace2 := make(map[string]any) + require.NoError(tt, c.Database("test").Collection("namespaces").FindOne(ctx, bson.M{"tenant_id": "tenant-2"}).Decode(&namespace2)) + _, hasRemovedCount2 := namespace2["devices_removed_count"] + assert.False(tt, hasRemovedCount2) + assert.Equal(tt, int32(10), namespace2["devices_accepted_count"]) + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { assert.NoError(tt, srv.Reset()) }) + + require.NoError(tt, tc.setup()) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[103]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + require.NoError(tt, migrates.Down(ctx, migrate.AllAvailable)) + tc.verify(tt) + }) + } +} diff --git a/api/store/mongo/migrations/migration_105.go b/api/store/mongo/migrations/migration_105.go new file mode 100644 index 00000000000..5ee7efa2847 --- /dev/null +++ b/api/store/mongo/migrations/migration_105.go @@ -0,0 +1,46 @@ +package migrations + +import ( + "context" + + log "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration105 = migrate.Migration{ + Version: 105, + Description: "Drop removed_devices collection as it's no longer needed", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{"component": "migration", "version": 105, "action": "Up"}).Info("Applying migration") + + collections, err := db.ListCollectionNames(ctx, bson.M{"name": "removed_devices"}) + if err != nil { + log.WithError(err).Error("Failed to list collections") + + return err + } + + if len(collections) == 0 { + log.Info("Collection removed_devices does not exist, skipping drop") + + return nil + } + + if err := db.Collection("removed_devices").Drop(ctx); err != nil { + log.WithError(err).Error("Failed to drop 
removed_devices collection") + + return err + } + + log.WithFields(log.Fields{"collection": "removed_devices"}).Info("Successfully dropped removed_devices collection") + + return nil + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{"component": "migration", "version": 105, "action": "Down"}).Info("Cannot revert migration") + + return nil + }), +} diff --git a/api/store/mongo/migrations/migration_105_test.go b/api/store/mongo/migrations/migration_105_test.go new file mode 100644 index 00000000000..a0a9c8907f9 --- /dev/null +++ b/api/store/mongo/migrations/migration_105_test.go @@ -0,0 +1,69 @@ +package migrations + +import ( + "context" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +func TestMigration105Up(t *testing.T) { + ctx := context.Background() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "succeeds dropping removed_devices collection", + setup: func() error { + removedDevice := bson.M{ + "device": bson.M{ + "_id": primitive.NewObjectID(), + "uid": "device-1", + "name": "Device 1", + "tenant_id": "tenant-1", + }, + "timestamp": time.Now(), + } + + _, err := c.Database("test").Collection("removed_devices").InsertOne(ctx, removedDevice) + + return err + }, + verify: func(tt *testing.T) { + collections, err := c.Database("test").ListCollectionNames(ctx, bson.M{"name": "removed_devices"}) + require.NoError(tt, err) + assert.Empty(tt, collections) + }, + }, + { + description: "handles non-existent collection gracefully", + setup: func() error { + return nil + }, + verify: func(tt *testing.T) { + collections, err := c.Database("test").ListCollectionNames(ctx, bson.M{"name": "removed_devices"}) + require.NoError(tt, err) + assert.Empty(tt, 
collections) + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { assert.NoError(tt, srv.Reset()) }) + + require.NoError(tt, tc.setup()) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[104]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + tc.verify(tt) + }) + } +} diff --git a/api/store/mongo/migrations/migration_106.go b/api/store/mongo/migrations/migration_106.go new file mode 100644 index 00000000000..545a12a0548 --- /dev/null +++ b/api/store/mongo/migrations/migration_106.go @@ -0,0 +1,49 @@ +package migrations + +import ( + "context" + + log "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +var migration106 = migrate.Migration{ + Version: 106, + Description: "Add performance indexes to devices collection for cleanup and status filtering", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{"component": "migration", "version": 106, "action": "Up"}).Info("Applying migration") + + index := mongo.IndexModel{ + Keys: bson.D{{Key: "status", Value: 1}, {Key: "status_updated_at", Value: 1}}, + Options: options.Index().SetName("idx_status_status_updated_at"), + } + + indexName, err := db.Collection("devices").Indexes().CreateOne(ctx, index) + if err != nil { + log.WithError(err).Error("Failed to create indexes on devices collection") + + return err + } + + log.WithFields(log.Fields{"collection": "devices", "index": indexName}). 
+ Info("Successfully created indexes on devices collection") + + return nil + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{"component": "migration", "version": 106, "action": "Down"}).Info("Reverting migration") + + if _, err := db.Collection("devices").Indexes().DropOne(ctx, "idx_status_status_updated_at"); err != nil { + log.WithFields(log.Fields{"index": "idx_status_status_updated_at", "error": err}).Error("Failed to drop index (may not exist)") + + return err + } + + log.WithField("index", "idx_status_status_updated_at").Info("Successfully dropped index") + + return nil + }), +} diff --git a/api/store/mongo/migrations/migration_106_test.go b/api/store/mongo/migrations/migration_106_test.go new file mode 100644 index 00000000000..41370216f9f --- /dev/null +++ b/api/store/mongo/migrations/migration_106_test.go @@ -0,0 +1,111 @@ +package migrations + +import ( + "context" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration106Up(t *testing.T) { + ctx := context.Background() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "succeeds creating indexes on devices collection", + setup: func() error { + devices := []bson.M{ + { + "uid": "device-1", + "name": "Device 1", + "tenant_id": "tenant-1", + "status": "removed", + "status_updated_at": time.Now(), + }, + { + "uid": "device-2", + "name": "Device 2", + "tenant_id": "tenant-1", + "status": "accepted", + }, + } + + _, err := c.Database("test").Collection("devices").InsertMany(ctx, []any{devices[0], devices[1]}) + + return err + }, + verify: func(tt *testing.T) { + cursor, err := c.Database("test").Collection("devices").Indexes().List(ctx) + require.NoError(tt, err) + + var indexes []bson.M + require.NoError(tt, 
cursor.All(ctx, &indexes)) + + indexNames := make([]string, 0, len(indexes)) + for _, index := range indexes { + if name, ok := index["name"].(string); ok { + indexNames = append(indexNames, name) + } + } + + assert.Contains(tt, indexNames, "idx_status_status_updated_at") + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { assert.NoError(tt, srv.Reset()) }) + + require.NoError(tt, tc.setup()) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[105]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + tc.verify(tt) + }) + } +} + +func TestMigration106Down(t *testing.T) { + ctx := context.Background() + + t.Run("succeeds dropping created indexes", func(tt *testing.T) { + tt.Cleanup(func() { assert.NoError(tt, srv.Reset()) }) + + device := bson.M{ + "uid": "device-1", + "name": "Device 1", + "tenant_id": "tenant-1", + "status": "accepted", + } + _, err := c.Database("test").Collection("devices").InsertOne(ctx, device) + require.NoError(tt, err) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[105]) // migration106 + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + require.NoError(tt, migrates.Down(ctx, migrate.AllAvailable)) + + cursor, err := c.Database("test").Collection("devices").Indexes().List(ctx) + require.NoError(tt, err) + + var indexes []bson.M + require.NoError(tt, cursor.All(ctx, &indexes)) + + indexNames := make([]string, 0, len(indexes)) + for _, index := range indexes { + if name, ok := index["name"].(string); ok { + indexNames = append(indexNames, name) + } + } + + assert.NotContains(tt, indexNames, "idx_status_status_updated_at") + assert.Contains(tt, indexNames, "_id_") + }) +} diff --git a/api/store/mongo/migrations/migration_107.go b/api/store/mongo/migrations/migration_107.go new file mode 100644 index 00000000000..b1e04318f60 --- /dev/null +++ b/api/store/mongo/migrations/migration_107.go @@ -0,0 +1,78 @@ +package 
migrations + +import ( + "context" + + "github.com/shellhub-io/shellhub/pkg/models" + log "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration107 = migrate.Migration{ + Version: 107, + Description: "Restructure SAML signon_url to binding object with post and redirect fields", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{"component": "migration", "version": 107, "action": "Up"}).Info("Applying migration") + + system := &models.System{} + if err := db.Collection("system").FindOne(ctx, bson.M{}).Decode(system); err != nil { + return err + } + + preferred := "" + if system.Authentication.SAML.Enabled { + preferred = "post" + } + + pipeline := []bson.M{ + { + "$set": bson.M{ + "authentication.saml.idp.binding": bson.M{ + "post": bson.M{"$ifNull": []any{"$authentication.saml.idp.signon_url", ""}}, + "redirect": "", + "preferred": preferred, + }, + }, + }, + { + "$unset": "authentication.saml.idp.signon_url", + }, + } + + if _, err := db.Collection("system").UpdateOne(ctx, bson.M{}, pipeline); err != nil { + log.WithError(err).Error("Failed to update system document") + + return err + } + + log.Info("Successfully restructured SAML signon_url to binding") + + return nil + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{"component": "migration", "version": 107, "action": "Down"}).Info("Reverting migration") + + pipeline := []bson.M{ + { + "$set": bson.M{ + "authentication.saml.idp.signon_url": "$authentication.saml.idp.binding.post", + }, + }, + { + "$unset": "authentication.saml.idp.binding", + }, + } + + if _, err := db.Collection("system").UpdateOne(ctx, bson.M{}, pipeline); err != nil { + log.WithError(err).Error("Failed to revert system document") + + return err + } + + log.Info("Successfully reverted SAML 
signon_urls to signon_url") + + return nil + }), +} diff --git a/api/store/mongo/migrations/migration_107_test.go b/api/store/mongo/migrations/migration_107_test.go new file mode 100644 index 00000000000..bfe91dc0c1f --- /dev/null +++ b/api/store/mongo/migrations/migration_107_test.go @@ -0,0 +1,109 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration107Up(t *testing.T) { + ctx := context.Background() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "succeeds restructuring signon_url to binding when SAML has signon_url", + setup: func() error { + system := bson.M{ + "authentication": bson.M{ + "saml": bson.M{ + "enabled": true, + "idp": bson.M{ + "signon_url": "https://example.com/saml/login", + }, + }, + }, + } + + _, err := c.Database("test").Collection("system").InsertOne(ctx, system) + + return err + }, + verify: func(tt *testing.T) { + system := make(map[string]any) + require.NoError(tt, c.Database("test").Collection("system").FindOne(ctx, bson.M{}).Decode(&system)) + + auth := system["authentication"].(map[string]any) + saml := auth["saml"].(map[string]any) + idp := saml["idp"].(map[string]any) + + _, hasOldURL := idp["signon_url"] + assert.False(tt, hasOldURL) + + binding, hasBinding := idp["binding"] + require.True(tt, hasBinding) + + signonURLsMap := binding.(map[string]any) + assert.Equal(tt, "https://example.com/saml/login", signonURLsMap["post"]) + assert.Equal(tt, "", signonURLsMap["redirect"]) + assert.Equal(tt, "post", signonURLsMap["preferred"]) + }, + }, + { + description: "creates binding even when SAML config doesn't exist", + setup: func() error { + system := bson.M{ + "authentication": bson.M{ + "local": bson.M{ + "enabled": true, + }, + "saml": bson.M{ + "enabled": false, + "idp": 
bson.M{ + "signon_url": "", + }, + }, + }, + } + + _, err := c.Database("test").Collection("system").InsertOne(ctx, system) + + return err + }, + verify: func(tt *testing.T) { + system := make(map[string]any) + require.NoError(tt, c.Database("test").Collection("system").FindOne(ctx, bson.M{}).Decode(&system)) + + auth := system["authentication"].(map[string]any) + saml, hasSAML := auth["saml"] + require.True(tt, hasSAML) + + samlMap := saml.(map[string]any) + idp := samlMap["idp"].(map[string]any) + + binding := idp["binding"].(map[string]any) + assert.Equal(tt, "", binding["post"]) + assert.Equal(tt, "", binding["redirect"]) + assert.Equal(tt, "", binding["preferred"]) + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { assert.NoError(tt, srv.Reset()) }) + + require.NoError(tt, tc.setup()) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[106]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + tc.verify(tt) + }) + } +} diff --git a/api/store/mongo/migrations/migration_108.go b/api/store/mongo/migrations/migration_108.go new file mode 100644 index 00000000000..c357affa574 --- /dev/null +++ b/api/store/mongo/migrations/migration_108.go @@ -0,0 +1,167 @@ +package migrations + +import ( + "context" + "fmt" + + "github.com/shellhub-io/shellhub/pkg/clock" + "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +var migration108 = migrate.Migration{ + Version: 108, + Description: "Refactor tags structure in a separated collection.", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + type device struct { + UID string `bson:"uid"` + TenantID string `bson:"tenant_id"` + Tags []string `bson:"tags"` + } + + // Represents either a public key or a firewall 
rule + type taggedResource struct { + ID primitive.ObjectID `bson:"_id"` + TenantID string `bson:"tenant_id"` + Filter struct { + Tags []string `bson:"tags"` + } `bson:"filter"` + } + + type tag struct { + ID primitive.ObjectID `bson:"_id"` + } + + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 108, + "action": "Up", + }).Info("Applying migration up") + + session, err := db.Client().StartSession() + if err != nil { + return err + } + + defer session.EndSession(ctx) + + _, err = session.WithTransaction(ctx, func(sCtx mongo.SessionContext) (any, error) { + cursor, err := db.Collection("devices").Find(sCtx, bson.M{"uid": bson.M{"$ne": nil}}) + if err != nil { + return nil, err + } + + defer cursor.Close(sCtx) + + tagMapping := make(map[string]map[string]primitive.ObjectID) + for cursor.Next(sCtx) { + d := new(device) + if err := cursor.Decode(d); err != nil { + return nil, err + } + + if tagMapping[d.TenantID] == nil { + tagMapping[d.TenantID] = make(map[string]primitive.ObjectID) + } + + tagIDs := make([]primitive.ObjectID, 0, len(d.Tags)) + for _, tagName := range d.Tags { + if id, exists := tagMapping[d.TenantID][tagName]; exists { + tagIDs = append(tagIDs, id) + + continue + } + + t := new(tag) + if err := db. + Collection("tags"). + FindOneAndUpdate( + sCtx, + bson.M{ + "tenant_id": d.TenantID, + "name": tagName, + }, + bson.M{ + "$setOnInsert": bson.M{"created_at": clock.Now(), "updated_at": clock.Now()}, + "$set": bson.M{"name": tagName, "tenant_id": d.TenantID}, + }, + options.FindOneAndUpdate().SetUpsert(true).SetReturnDocument(options.After), + ). + Decode(t); err != nil { + return nil, err + } + + tagMapping[d.TenantID][tagName] = t.ID + tagIDs = append(tagIDs, t.ID) + } + + if _, err := db. + Collection("devices"). 
+ UpdateOne( + sCtx, + bson.M{"uid": d.UID}, + bson.M{"$set": bson.M{"tag_ids": tagIDs}, "$unset": bson.M{"tags": ""}}, + ); err != nil { + return nil, err + } + } + + if err := cursor.Err(); err != nil { + return nil, err + } + + for _, coll := range []string{"public_keys", "firewall_rules"} { + collCursor, err := db.Collection(coll).Find(sCtx, bson.M{"filter": bson.M{"$exists": true}}) + if err != nil { + return nil, err + } + + defer collCursor.Close(sCtx) + + for collCursor.Next(sCtx) { + res := new(taggedResource) + if err := collCursor.Decode(res); err != nil { + return nil, err + } + + tagIDs := make([]primitive.ObjectID, 0) + for _, tagName := range res.Filter.Tags { + if tagMapping[res.TenantID] == nil || tagMapping[res.TenantID][tagName] == primitive.NilObjectID { + return nil, fmt.Errorf("[%s] document with ID %s references non-existent tag %q", coll, res.ID, tagName) + } + + tagIDs = append(tagIDs, tagMapping[res.TenantID][tagName]) + } + + if _, err := db. + Collection(coll). 
+ UpdateOne( + sCtx, + bson.M{"_id": res.ID}, + bson.M{ + "$set": bson.M{"filter.tag_ids": tagIDs}, + "$unset": bson.M{"filter.tags": ""}, + }, + ); err != nil { + return nil, err + } + } + + if err := collCursor.Err(); err != nil { + return nil, err + } + } + + return nil, nil + }) + + return err + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { + return nil + }), +} diff --git a/api/store/mongo/migrations/migration_108_test.go b/api/store/mongo/migrations/migration_108_test.go new file mode 100644 index 00000000000..0cf260fe842 --- /dev/null +++ b/api/store/mongo/migrations/migration_108_test.go @@ -0,0 +1,426 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +func TestMigration108Up(t *testing.T) { + ctx := context.Background() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "succeeds creating tags collection with unique IDs and correct mapping", + setup: func() error { + devices := []bson.M{ + { + "uid": "device-1", + "tenant_id": "tenant-1", + "tags": []string{"production", "backend", "critical"}, + }, + { + "uid": "device-2", + "tenant_id": "tenant-1", + "tags": []string{"production", "frontend"}, + }, + { + "uid": "device-3", + "tenant_id": "tenant-2", + "tags": []string{"production", "database"}, + }, + { + "uid": "device-4", + "tenant_id": "tenant-1", + "tags": []string{}, + }, + { + "uid": "device-5", + "tenant_id": "tenant-2", + "tags": nil, + }, + } + + _, err := c.Database("test").Collection("devices").InsertMany(ctx, []any{devices[0], devices[1], devices[2], devices[3], devices[4]}) + + return err + }, + verify: func(tt *testing.T) { + cursor, err := c.Database("test").Collection("tags").Find(ctx, bson.M{}) + require.NoError(tt, err) + + tags := make([]bson.M, 0) 
+ require.NoError(tt, cursor.All(ctx, &tags)) + require.Equal(tt, 6, len(tags)) + + tagsMap := make(map[string]map[string]primitive.ObjectID) + for _, tag := range tags { + require.NotNil(tt, tag["created_at"]) + require.NotNil(tt, tag["updated_at"]) + + tenantID := tag["tenant_id"].(string) + name := tag["name"].(string) + id := tag["_id"].(primitive.ObjectID) + + if tagsMap[tenantID] == nil { + tagsMap[tenantID] = make(map[string]primitive.ObjectID) + } + tagsMap[tenantID][name] = id + } + + require.Contains(tt, tagsMap["tenant-1"], "production") + require.Contains(tt, tagsMap["tenant-1"], "backend") + require.Contains(tt, tagsMap["tenant-1"], "critical") + require.Contains(tt, tagsMap["tenant-1"], "frontend") + require.Contains(tt, tagsMap["tenant-2"], "production") + require.Contains(tt, tagsMap["tenant-2"], "database") + }, + }, + { + description: "succeeds migrating device tags to tag_ids with correct ObjectID references", + setup: func() error { + devices := []bson.M{ + { + "uid": "device-1", + "tenant_id": "tenant-1", + "tags": []string{"production", "backend"}, + }, + { + "uid": "device-2", + "tenant_id": "tenant-1", + "tags": []string{"production", "frontend", "backend"}, + }, + { + "uid": "device-3", + "tenant_id": "tenant-2", + "tags": []string{"staging", "backend"}, + }, + { + "uid": "device-4", + "tenant_id": "tenant-1", + "tags": []string{}, + }, + { + "uid": "device-5", + "tenant_id": "tenant-1", + "tags": nil, + }, + } + + _, err := c.Database("test").Collection("devices").InsertMany(ctx, []any{devices[0], devices[1], devices[2], devices[3], devices[4]}) + + return err + }, + verify: func(tt *testing.T) { + cursor, err := c.Database("test").Collection("tags").Find(ctx, bson.M{}) + require.NoError(tt, err) + + tags := make([]bson.M, 0) + require.NoError(tt, cursor.All(ctx, &tags)) + + tagsMap := make(map[string]map[string]primitive.ObjectID) + for _, tag := range tags { + tenantID := tag["tenant_id"].(string) + name := tag["name"].(string) + id := 
tag["_id"].(primitive.ObjectID) + + if tagsMap[tenantID] == nil { + tagsMap[tenantID] = make(map[string]primitive.ObjectID) + } + tagsMap[tenantID][name] = id + } + + device1 := bson.M{} + require.NoError(tt, c.Database("test").Collection("devices").FindOne(ctx, bson.M{"uid": "device-1"}).Decode(&device1)) + require.Nil(tt, device1["tags"]) + require.Equal(tt, 2, len(device1["tag_ids"].(primitive.A))) + require.Contains(tt, device1["tag_ids"].(primitive.A), tagsMap["tenant-1"]["production"]) + require.Contains(tt, device1["tag_ids"].(primitive.A), tagsMap["tenant-1"]["backend"]) + + device2 := bson.M{} + require.NoError(tt, c.Database("test").Collection("devices").FindOne(ctx, bson.M{"uid": "device-2"}).Decode(&device2)) + require.Nil(tt, device2["tags"]) + require.Equal(tt, 3, len(device2["tag_ids"].(primitive.A))) + require.Contains(tt, device2["tag_ids"].(primitive.A), tagsMap["tenant-1"]["production"]) + require.Contains(tt, device2["tag_ids"].(primitive.A), tagsMap["tenant-1"]["frontend"]) + require.Contains(tt, device2["tag_ids"].(primitive.A), tagsMap["tenant-1"]["backend"]) + + device3 := bson.M{} + require.NoError(tt, c.Database("test").Collection("devices").FindOne(ctx, bson.M{"uid": "device-3"}).Decode(&device3)) + require.Nil(tt, device3["tags"]) + require.Equal(tt, 2, len(device3["tag_ids"].(primitive.A))) + require.Contains(tt, device3["tag_ids"].(primitive.A), tagsMap["tenant-2"]["staging"]) + require.Contains(tt, device3["tag_ids"].(primitive.A), tagsMap["tenant-2"]["backend"]) + + device4 := bson.M{} + require.NoError(tt, c.Database("test").Collection("devices").FindOne(ctx, bson.M{"uid": "device-4"}).Decode(&device4)) + require.Nil(tt, device4["tags"]) + require.Equal(tt, 0, len(device4["tag_ids"].(primitive.A))) + + device5 := bson.M{} + require.NoError(tt, c.Database("test").Collection("devices").FindOne(ctx, bson.M{"uid": "device-5"}).Decode(&device5)) + require.Nil(tt, device5["tags"]) + require.Equal(tt, 0, 
len(device5["tag_ids"].(primitive.A))) + }, + }, + { + description: "succeeds migrating firewall_rules filter.tags to filter.tag_ids", + setup: func() error { + devices := []bson.M{ + { + "uid": "device-1", + "tenant_id": "tenant-1", + "tags": []string{"production", "backend"}, + }, + { + "uid": "device-2", + "tenant_id": "tenant-1", + "tags": []string{"production", "frontend"}, + }, + { + "uid": "device-3", + "tenant_id": "tenant-2", + "tags": []string{"production"}, + }, + } + + firewallRules := []bson.M{ + { + "tenant_id": "tenant-1", + "priority": 1, + "filter": bson.M{ + "tags": []string{"production", "backend"}, + }, + }, + { + "tenant_id": "tenant-1", + "priority": 2, + "filter": bson.M{ + "tags": []string{"production", "frontend"}, + }, + }, + { + "tenant_id": "tenant-2", + "priority": 1, + "filter": bson.M{ + "tags": []string{"production"}, + }, + }, + { + "tenant_id": "tenant-2", + "priority": 2, + "filter": bson.M{ + "tags": []string{}, + }, + }, + { + "tenant_id": "tenant-1", + "priority": 3, + "filter": bson.M{ + "hostname": ".*", + }, + }, + } + + if _, err := c.Database("test").Collection("devices").InsertMany(ctx, []any{devices[0], devices[1], devices[2]}); err != nil { + return err + } + + if _, err := c.Database("test").Collection("firewall_rules").InsertMany(ctx, []any{firewallRules[0], firewallRules[1], firewallRules[2], firewallRules[3], firewallRules[4]}); err != nil { // nolint:revive + return err + } + + return nil + }, + verify: func(tt *testing.T) { + cursor, err := c.Database("test").Collection("tags").Find(ctx, bson.M{}) + require.NoError(tt, err) + + tags := make([]bson.M, 0) + require.NoError(tt, cursor.All(ctx, &tags)) + + tagsMap := make(map[string]map[string]primitive.ObjectID) + for _, tag := range tags { + tenantID := tag["tenant_id"].(string) + name := tag["name"].(string) + id := tag["_id"].(primitive.ObjectID) + + if tagsMap[tenantID] == nil { + tagsMap[tenantID] = make(map[string]primitive.ObjectID) + } + 
tagsMap[tenantID][name] = id + } + + rule1 := bson.M{} + require.NoError(tt, c.Database("test").Collection("firewall_rules").FindOne(ctx, bson.M{"priority": 1, "tenant_id": "tenant-1"}).Decode(&rule1)) + require.Nil(tt, rule1["filter"].(bson.M)["tags"]) + require.Equal(tt, 2, len(rule1["filter"].(bson.M)["tag_ids"].(primitive.A))) + require.Contains(tt, rule1["filter"].(bson.M)["tag_ids"].(primitive.A), tagsMap["tenant-1"]["production"]) + require.Contains(tt, rule1["filter"].(bson.M)["tag_ids"].(primitive.A), tagsMap["tenant-1"]["backend"]) + + rule2 := bson.M{} + require.NoError(tt, c.Database("test").Collection("firewall_rules").FindOne(ctx, bson.M{"priority": 2, "tenant_id": "tenant-1"}).Decode(&rule2)) + require.Nil(tt, rule2["filter"].(bson.M)["tags"]) + require.Equal(tt, 2, len(rule2["filter"].(bson.M)["tag_ids"].(primitive.A))) + require.Contains(tt, rule2["filter"].(bson.M)["tag_ids"].(primitive.A), tagsMap["tenant-1"]["production"]) + require.Contains(tt, rule2["filter"].(bson.M)["tag_ids"].(primitive.A), tagsMap["tenant-1"]["frontend"]) + + rule3 := bson.M{} + require.NoError(tt, c.Database("test").Collection("firewall_rules").FindOne(ctx, bson.M{"priority": 1, "tenant_id": "tenant-2"}).Decode(&rule3)) + require.Nil(tt, rule3["filter"].(bson.M)["tags"]) + require.Equal(tt, 1, len(rule3["filter"].(bson.M)["tag_ids"].(primitive.A))) + require.Contains(tt, rule3["filter"].(bson.M)["tag_ids"].(primitive.A), tagsMap["tenant-2"]["production"]) + + rule4 := bson.M{} + require.NoError(tt, c.Database("test").Collection("firewall_rules").FindOne(ctx, bson.M{"priority": 2, "tenant_id": "tenant-2"}).Decode(&rule4)) + require.Nil(tt, rule4["filter"].(bson.M)["tags"]) + require.Equal(tt, 0, len(rule4["filter"].(bson.M)["tag_ids"].(primitive.A))) + + rule5 := bson.M{} + require.NoError(tt, c.Database("test").Collection("firewall_rules").FindOne(ctx, bson.M{"priority": 3, "tenant_id": "tenant-1"}).Decode(&rule5)) + require.Nil(tt, rule5["filter"].(bson.M)["tags"]) + 
require.Equal(tt, 0, len(rule5["filter"].(bson.M)["tag_ids"].(primitive.A))) + require.Equal(tt, ".*", rule5["filter"].(bson.M)["hostname"].(string)) + }, + }, + { + description: "succeeds migrating public_keys filter.tags to filter.tag_ids", + setup: func() error { + devices := []bson.M{ + { + "uid": "device-1", + "tenant_id": "tenant-1", + "tags": []string{"production", "backend"}, + }, + { + "uid": "device-2", + "tenant_id": "tenant-1", + "tags": []string{"production", "frontend"}, + }, + { + "uid": "device-3", + "tenant_id": "tenant-2", + "tags": []string{"production"}, + }, + } + + publicKeys := []bson.M{ + { + "tenant_id": "tenant-1", + "fingerprint": "key-1", + "filter": bson.M{ + "tags": []string{"production", "backend"}, + }, + }, + { + "tenant_id": "tenant-1", + "fingerprint": "key-2", + "filter": bson.M{ + "tags": []string{"production", "frontend"}, + }, + }, + { + "tenant_id": "tenant-2", + "fingerprint": "key-3", + "filter": bson.M{ + "tags": []string{"production"}, + }, + }, + { + "tenant_id": "tenant-2", + "fingerprint": "key-4", + "filter": bson.M{ + "tags": []string{}, + }, + }, + { + "tenant_id": "tenant-1", + "fingerprint": "key-5", + "filter": bson.M{ + "hostname": ".*", + }, + }, + } + + if _, err := c.Database("test").Collection("devices").InsertMany(ctx, []any{devices[0], devices[1], devices[2]}); err != nil { + return err + } + + if _, err := c.Database("test").Collection("public_keys").InsertMany(ctx, []any{publicKeys[0], publicKeys[1], publicKeys[2], publicKeys[3], publicKeys[4]}); err != nil { // nolint:revive + return err + } + + return nil + }, + verify: func(tt *testing.T) { + cursor, err := c.Database("test").Collection("tags").Find(ctx, bson.M{}) + require.NoError(tt, err) + + var tags []bson.M + require.NoError(tt, cursor.All(ctx, &tags)) + + tagMap := make(map[string]map[string]primitive.ObjectID) + for _, tag := range tags { + tenantID := tag["tenant_id"].(string) + name := tag["name"].(string) + id := 
tag["_id"].(primitive.ObjectID) + + if tagMap[tenantID] == nil { + tagMap[tenantID] = make(map[string]primitive.ObjectID) + } + tagMap[tenantID][name] = id + } + + key1 := bson.M{} + require.NoError(tt, c.Database("test").Collection("public_keys").FindOne(ctx, bson.M{"fingerprint": "key-1"}).Decode(&key1)) + require.Nil(tt, key1["filter"].(bson.M)["tags"]) + require.Equal(tt, 2, len(key1["filter"].(bson.M)["tag_ids"].(primitive.A))) + require.Contains(tt, key1["filter"].(bson.M)["tag_ids"].(primitive.A), tagMap["tenant-1"]["production"]) + require.Contains(tt, key1["filter"].(bson.M)["tag_ids"].(primitive.A), tagMap["tenant-1"]["backend"]) + + key2 := bson.M{} + require.NoError(tt, c.Database("test").Collection("public_keys").FindOne(ctx, bson.M{"fingerprint": "key-2"}).Decode(&key2)) + require.Nil(tt, key2["filter"].(bson.M)["tags"]) + require.Equal(tt, 2, len(key2["filter"].(bson.M)["tag_ids"].(primitive.A))) + require.Contains(tt, key2["filter"].(bson.M)["tag_ids"].(primitive.A), tagMap["tenant-1"]["production"]) + require.Contains(tt, key2["filter"].(bson.M)["tag_ids"].(primitive.A), tagMap["tenant-1"]["frontend"]) + + key3 := bson.M{} + require.NoError(tt, c.Database("test").Collection("public_keys").FindOne(ctx, bson.M{"fingerprint": "key-3"}).Decode(&key3)) + require.Nil(tt, key3["filter"].(bson.M)["tags"]) + require.Equal(tt, 1, len(key3["filter"].(bson.M)["tag_ids"].(primitive.A))) + require.Contains(tt, key3["filter"].(bson.M)["tag_ids"].(primitive.A), tagMap["tenant-2"]["production"]) + + key4 := bson.M{} + require.NoError(tt, c.Database("test").Collection("public_keys").FindOne(ctx, bson.M{"fingerprint": "key-4"}).Decode(&key4)) + require.Nil(tt, key4["filter"].(bson.M)["tags"]) + require.Equal(tt, 0, len(key4["filter"].(bson.M)["tag_ids"].(primitive.A))) + + key5 := bson.M{} + require.NoError(tt, c.Database("test").Collection("public_keys").FindOne(ctx, bson.M{"fingerprint": "key-5"}).Decode(&key5)) + require.Nil(tt, key5["filter"].(bson.M)["tags"]) + 
require.Equal(tt, 0, len(key5["filter"].(bson.M)["tag_ids"].(primitive.A))) + require.Equal(tt, ".*", key5["filter"].(bson.M)["hostname"].(string)) + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { require.NoError(tt, srv.Reset()) }) + + require.NoError(tt, tc.setup()) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[107]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + tc.verify(tt) + }) + } +} diff --git a/api/store/mongo/migrations/migration_109.go b/api/store/mongo/migrations/migration_109.go new file mode 100644 index 00000000000..595694ae4d2 --- /dev/null +++ b/api/store/mongo/migrations/migration_109.go @@ -0,0 +1,53 @@ +package migrations + +import ( + "context" + + "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +var migration109 = migrate.Migration{ + Version: 109, + Description: "Add indexes to tags collection", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 109, + "action": "Up", + }).Info("Applying migration up") + + _, err := db.Collection("tags").Indexes().CreateMany(ctx, []mongo.IndexModel{ + { + Keys: bson.D{{Key: "tenant_id", Value: 1}, {Key: "name", Value: 1}}, + Options: options.Index().SetUnique(true).SetName("idx_tenant_id_name_unique"), + }, + { + Keys: bson.D{{Key: "tenant_id", Value: 1}}, + Options: options.Index().SetName("idx_tenant_id"), + }, + }) + + return err + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 109, + "action": "Down", + }).Info("Reverting migration down") + + if _, err := db.Collection("tags").Indexes().DropOne(ctx, "idx_tenant_id_name_unique"); 
err != nil { + return err + } + + if _, err := db.Collection("tags").Indexes().DropOne(ctx, "idx_tenant_id"); err != nil { + return err + } + + return nil + }), +} diff --git a/api/store/mongo/migrations/migration_109_test.go b/api/store/mongo/migrations/migration_109_test.go new file mode 100644 index 00000000000..9286c3aaeeb --- /dev/null +++ b/api/store/mongo/migrations/migration_109_test.go @@ -0,0 +1,92 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration109Up(t *testing.T) { + ctx := context.Background() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "succeeds creating indexes on tags collection", + setup: func() error { + tags := []bson.M{{"tenant_id": "tenant-1", "name": "tag1"}, {"tenant_id": "tenant-1", "name": "tag2"}} + _, err := c.Database("test").Collection("tags").InsertMany(ctx, []any{tags[0], tags[1]}) + + return err + }, + verify: func(tt *testing.T) { + cursor, err := c.Database("test").Collection("tags").Indexes().List(ctx) + require.NoError(tt, err) + + var indexes []bson.M + require.NoError(tt, cursor.All(ctx, &indexes)) + + indexNames := make([]string, 0, len(indexes)) + for _, index := range indexes { + if name, ok := index["name"].(string); ok { + indexNames = append(indexNames, name) + } + } + + assert.Contains(tt, indexNames, "idx_tenant_id_name_unique") + assert.Contains(tt, indexNames, "idx_tenant_id") + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { assert.NoError(tt, srv.Reset()) }) + + require.NoError(tt, tc.setup()) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[108]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + tc.verify(tt) + }) + } +} + +func 
TestMigration109Down(t *testing.T) { + ctx := context.Background() + + t.Run("succeeds dropping created indexes", func(tt *testing.T) { + tt.Cleanup(func() { assert.NoError(tt, srv.Reset()) }) + + tag := bson.M{"tenant_id": "tenant-1", "name": "tag1"} + _, err := c.Database("test").Collection("tags").InsertOne(ctx, tag) + require.NoError(tt, err) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[108]) // migration109 + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + require.NoError(tt, migrates.Down(ctx, migrate.AllAvailable)) + + cursor, err := c.Database("test").Collection("tags").Indexes().List(ctx) + require.NoError(tt, err) + + var indexes []bson.M + require.NoError(tt, cursor.All(ctx, &indexes)) + + indexNames := make([]string, 0, len(indexes)) + for _, index := range indexes { + if name, ok := index["name"].(string); ok { + indexNames = append(indexNames, name) + } + } + + assert.NotContains(tt, indexNames, "idx_tenant_id_name_unique") + assert.NotContains(tt, indexNames, "idx_tenant_id") + assert.Contains(tt, indexNames, "_id_") + }) +} diff --git a/api/store/mongo/migrations/migration_10_test.go b/api/store/mongo/migrations/migration_10_test.go index b6005ea8a9b..e426f8a0d92 100644 --- a/api/store/mongo/migrations/migration_10_test.go +++ b/api/store/mongo/migrations/migration_10_test.go @@ -4,20 +4,17 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" ) func TestMigration10(t *testing.T) { - logrus.Info("Testing Migration 10 - Test if the session_record is not unique") + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) - db := dbtest.DBServer{} - defer db.Stop() - - migrates := migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:9]...) 
- err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:9]...) + err := migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) user1 := struct { @@ -48,13 +45,13 @@ func TestMigration10(t *testing.T) { SessionRecord: true, } - _, err = db.Client().Database("test").Collection("users").InsertOne(context.TODO(), user1) + _, err = c.Database("test").Collection("users").InsertOne(context.TODO(), user1) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("users").InsertOne(context.TODO(), user2) + _, err = c.Database("test").Collection("users").InsertOne(context.TODO(), user2) assert.NoError(t, err) - migrates = migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:10]...) - err = migrates.Up(migrate.AllAvailable) + migrates = migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:10]...) + err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) } diff --git a/api/store/mongo/migrations/migration_11.go b/api/store/mongo/migrations/migration_11.go index 305931ad0d6..4bbdf83148b 100644 --- a/api/store/mongo/migrations/migration_11.go +++ b/api/store/mongo/migrations/migration_11.go @@ -13,7 +13,7 @@ import ( var migration11 = migrate.Migration{ Version: 11, Description: "Create a ttl for the private_keys collection", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 11, @@ -23,21 +23,21 @@ var migration11 = migrate.Migration{ Keys: bson.D{{"created_at", 1}}, Options: options.Index().SetName("ttl").SetExpireAfterSeconds(60), } - _, err := db.Collection("private_keys").Indexes().CreateOne(context.TODO(), mod) + _, err := db.Collection("private_keys").Indexes().CreateOne(ctx, mod) if err != nil { return err } return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: 
migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 11, "action": "Down", }).Info("Applying migration") - _, err := db.Collection("private_keys").Indexes().DropOne(context.TODO(), "ttl") + _, err := db.Collection("private_keys").Indexes().DropOne(ctx, "ttl") return err - }, + }), } diff --git a/api/store/mongo/migrations/migration_110.go b/api/store/mongo/migrations/migration_110.go new file mode 100644 index 00000000000..a4e8a057273 --- /dev/null +++ b/api/store/mongo/migrations/migration_110.go @@ -0,0 +1,35 @@ +package migrations + +import ( + "context" + + "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration110 = migrate.Migration{ + Version: 110, + Description: "Remove all devices with status=removed", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 110, + "action": "Up", + }).Info("Applying migration up") + + _, err := db.Collection("devices").DeleteMany(ctx, bson.M{"status": "removed"}) + + return err + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 110, + "action": "Down", + }).Warn("Nothing to do on down migration (cannot restore deleted devices)") + + return nil + }), +} diff --git a/api/store/mongo/migrations/migration_110_test.go b/api/store/mongo/migrations/migration_110_test.go new file mode 100644 index 00000000000..9e2cc9cf52a --- /dev/null +++ b/api/store/mongo/migrations/migration_110_test.go @@ -0,0 +1,57 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + 
"go.mongodb.org/mongo-driver/bson" +) + +func TestMigration110Up(t *testing.T) { + ctx := context.Background() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "removes all devices with status=removed", + setup: func() error { + devices := []bson.M{ + {"_id": "d1", "status": "accepted"}, + {"_id": "d2", "status": "removed"}, + {"_id": "d3", "status": "removed"}, + } + + _, err := c.Database("test").Collection("devices").InsertMany(ctx, []any{devices[0], devices[1], devices[2]}) + + return err + }, + verify: func(tt *testing.T) { + cursor, err := c.Database("test").Collection("devices").Find(ctx, bson.M{}) + require.NoError(tt, err) + + var result []bson.M + require.NoError(tt, cursor.All(ctx, &result)) + + assert.Len(tt, result, 1) + assert.Equal(tt, "accepted", result[0]["status"]) + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { assert.NoError(tt, srv.Reset()) }) + + require.NoError(tt, tc.setup()) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[109]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + tc.verify(tt) + }) + } +} diff --git a/api/store/mongo/migrations/migration_111.go b/api/store/mongo/migrations/migration_111.go new file mode 100644 index 00000000000..7c5b7f647e4 --- /dev/null +++ b/api/store/mongo/migrations/migration_111.go @@ -0,0 +1,37 @@ +package migrations + +import ( + "context" + + "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration111 = migrate.Migration{ + Version: 111, + Description: "Add removed_at field with null value to all devices", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 111, + "action": "Up", + }).Info("Applying migration up") + 
+ _, err := db.Collection("devices").UpdateMany(ctx, bson.M{}, bson.M{"$set": bson.M{"removed_at": nil}}) + + return err + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 111, + "action": "Down", + }).Info("Reverting migration down") + + _, err := db.Collection("devices").UpdateMany(ctx, bson.M{}, bson.M{"$unset": bson.M{"removed_at": ""}}) + + return err + }), +} diff --git a/api/store/mongo/migrations/migration_111_test.go b/api/store/mongo/migrations/migration_111_test.go new file mode 100644 index 00000000000..bcecc54147b --- /dev/null +++ b/api/store/mongo/migrations/migration_111_test.go @@ -0,0 +1,99 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration111Up(t *testing.T) { + ctx := context.Background() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "succeeds adding removed_at=null to all devices", + setup: func() error { + devices := []bson.M{{"_id": "d1", "status": "active"}, {"_id": "d2", "status": "inactive"}} + _, err := c.Database("test").Collection("devices").InsertMany(ctx, []any{devices[0], devices[1]}) + + return err + }, + verify: func(tt *testing.T) { + cursor, err := c.Database("test").Collection("devices").Find(ctx, bson.M{}) + require.NoError(tt, err) + + var results []bson.M + require.NoError(tt, cursor.All(ctx, &results)) + + for _, d := range results { + val, exists := d["removed_at"] + assert.True(tt, exists) + assert.Nil(tt, val) + } + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { assert.NoError(tt, srv.Reset()) }) + + require.NoError(tt, tc.setup()) + migrates := 
migrate.NewMigrate(c.Database("test"), GenerateMigrations()[110]) // migration111 + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + tc.verify(tt) + }) + } +} + +func TestMigration111Down(t *testing.T) { + ctx := context.Background() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "succeeds removing removed_at field from all devices", + setup: func() error { + devices := []bson.M{{"_id": "d1", "status": "active", "removed_at": nil}, {"_id": "d2", "status": "inactive", "removed_at": nil}} + _, err := c.Database("test").Collection("devices").InsertMany(ctx, []any{devices[0], devices[1]}) + + return err + }, + verify: func(tt *testing.T) { + cursor, err := c.Database("test").Collection("devices").Find(ctx, bson.M{}) + require.NoError(tt, err) + + var results []bson.M + require.NoError(tt, cursor.All(ctx, &results)) + + for _, d := range results { + _, exists := d["removed_at"] + assert.False(tt, exists) + } + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { assert.NoError(tt, srv.Reset()) }) + + require.NoError(tt, tc.setup()) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[110]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + require.NoError(tt, migrates.Down(ctx, migrate.AllAvailable)) + tc.verify(tt) + }) + } +} diff --git a/api/store/mongo/migrations/migration_112.go b/api/store/mongo/migrations/migration_112.go new file mode 100644 index 00000000000..ad06c192548 --- /dev/null +++ b/api/store/mongo/migrations/migration_112.go @@ -0,0 +1,89 @@ +package migrations + +import ( + "context" + + "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration112 = migrate.Migration{ + Version: 112, + Description: "Remove events subdocument from sessions collection", + Up: 
migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 112, + "action": "Up", + }).Info("Applying migration up") + + if _, err := db.Collection("sessions").UpdateMany(ctx, bson.M{}, bson.M{"$unset": bson.M{"events": ""}}); err != nil { // nolint:revive + return err + } + + return nil + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 112, + "action": "Down", + }).Info("Reverting migration down") + + pipeline := []bson.M{ + { + "$lookup": bson.M{ + "from": "sessions_events", + "let": bson.M{"sessionUID": "$uid"}, + "pipeline": []bson.M{ + { + "$match": bson.M{ + "$expr": bson.M{"$eq": []string{"$session", "$$sessionUID"}}, + }, + }, + { + "$group": bson.M{ + "_id": nil, + "types": bson.M{"$addToSet": "$type"}, + "seats": bson.M{"$addToSet": "$seat"}, + }, + }, + }, + "as": "eventData", + }, + }, + { + "$set": bson.M{ + "events": bson.M{ + "$cond": bson.M{ + "if": bson.M{"$gt": []any{bson.M{"$size": "$eventData"}, 0}}, + "then": bson.M{ + "types": bson.M{"$arrayElemAt": []any{"$eventData.types", 0}}, + "seats": bson.M{"$arrayElemAt": []any{"$eventData.seats", 0}}, + }, + "else": bson.M{ + "types": []string{}, + "seats": []int{}, + }, + }, + }, + }, + }, + { + "$unset": "eventData", + }, + { + "$merge": bson.M{ + "into": "sessions", + "whenMatched": "replace", + }, + }, + } + + _, err := db.Collection("sessions").Aggregate(ctx, pipeline) + + return err + }), +} diff --git a/api/store/mongo/migrations/migration_112_test.go b/api/store/mongo/migrations/migration_112_test.go new file mode 100644 index 00000000000..def1d834f0a --- /dev/null +++ b/api/store/mongo/migrations/migration_112_test.go @@ -0,0 +1,206 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate 
"github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration112Up(t *testing.T) { + ctx := context.Background() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "succeeds removing events subdocument from sessions", + setup: func() error { + sessions := []bson.M{ + { + "uid": "session-1", + "username": "user1", + "events": bson.M{ + "types": []string{"shell", "pty-req"}, + "seats": []int{1, 2}, + }, + }, + { + "uid": "session-2", + "username": "user2", + "events": bson.M{ + "types": []string{"exec"}, + "seats": []int{1}, + }, + }, + } + _, err := c.Database("test").Collection("sessions").InsertMany(ctx, []any{sessions[0], sessions[1]}) + + return err + }, + verify: func(tt *testing.T) { + cursor, err := c.Database("test").Collection("sessions").Find(ctx, bson.M{}) + require.NoError(tt, err) + + var sessions []bson.M + require.NoError(tt, cursor.All(ctx, &sessions)) + + for _, session := range sessions { + _, exists := session["events"] + assert.False(tt, exists, "events field should have been removed") + } + }, + }, + { + description: "succeeds when sessions have no events subdocument", + setup: func() error { + sessions := []bson.M{ + { + "uid": "session-3", + "username": "user3", + }, + } + _, err := c.Database("test").Collection("sessions").InsertMany(ctx, []any{sessions[0]}) + + return err + }, + verify: func(tt *testing.T) { + cursor, err := c.Database("test").Collection("sessions").Find(ctx, bson.M{}) + require.NoError(tt, err) + + var sessions []bson.M + require.NoError(tt, cursor.All(ctx, &sessions)) + + for _, session := range sessions { + _, exists := session["events"] + assert.False(tt, exists, "events field should not exist") + } + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { assert.NoError(tt, srv.Reset()) }) + + require.NoError(tt, tc.setup()) + migrates := 
migrate.NewMigrate(c.Database("test"), GenerateMigrations()[111]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + tc.verify(tt) + }) + } +} + +func TestMigration112Down(t *testing.T) { + ctx := context.Background() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "succeeds reconstructing events subdocument from sessions_events", + setup: func() error { + sessions := []bson.M{ + {"uid": "session-1", "username": "user1"}, + {"uid": "session-2", "username": "user2"}, + } + _, err := c.Database("test").Collection("sessions").InsertMany(ctx, []any{sessions[0], sessions[1]}) + if err != nil { + return err + } + + events := []bson.M{ + {"session": "session-1", "type": "shell", "seat": 1}, + {"session": "session-1", "type": "pty-req", "seat": 1}, + {"session": "session-1", "type": "shell", "seat": 2}, + {"session": "session-2", "type": "exec", "seat": 1}, + } + _, err = c.Database("test").Collection("sessions_events").InsertMany(ctx, []any{events[0], events[1], events[2], events[3]}) + + return err + }, + verify: func(tt *testing.T) { + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[111]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + + require.NoError(tt, migrates.Down(ctx, migrate.AllAvailable)) + + cursor, err := c.Database("test").Collection("sessions").Find(ctx, bson.M{}) + require.NoError(tt, err) + + var sessions []bson.M + require.NoError(tt, cursor.All(ctx, &sessions)) + + sessionEventsByUID := make(map[string]bson.M) + for _, session := range sessions { + uid := session["uid"].(string) + sessionEventsByUID[uid] = session + } + + session1Events := sessionEventsByUID["session-1"]["events"].(bson.M) + types1 := session1Events["types"].(bson.A) + seats1 := session1Events["seats"].(bson.A) + + assert.Len(tt, types1, 2) + assert.Contains(tt, types1, "shell") + assert.Contains(tt, types1, "pty-req") + + assert.Len(tt, seats1, 2) + 
assert.Contains(tt, seats1, int32(1)) + assert.Contains(tt, seats1, int32(2)) + + session2Events := sessionEventsByUID["session-2"]["events"].(bson.M) + types2 := session2Events["types"].(bson.A) + seats2 := session2Events["seats"].(bson.A) + + assert.Len(tt, types2, 1) + assert.Contains(tt, types2, "exec") + + assert.Len(tt, seats2, 1) + assert.Contains(tt, seats2, int32(1)) + }, + }, + { + description: "succeeds creating empty events for sessions without events", + setup: func() error { + sessions := []bson.M{{"uid": "session-3", "username": "user3"}} + _, err := c.Database("test").Collection("sessions").InsertMany(ctx, []any{sessions[0]}) + + return err + }, + verify: func(tt *testing.T) { + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[111]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + + require.NoError(tt, migrates.Down(ctx, migrate.AllAvailable)) + + var session bson.M + err := c.Database("test").Collection("sessions").FindOne(ctx, bson.M{"uid": "session-3"}).Decode(&session) + require.NoError(tt, err) + + events := session["events"].(bson.M) + types := events["types"].(bson.A) + seats := events["seats"].(bson.A) + + assert.Len(tt, types, 0) + assert.Len(tt, seats, 0) + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { assert.NoError(tt, srv.Reset()) }) + + require.NoError(tt, tc.setup()) + tc.verify(tt) + }) + } +} diff --git a/api/store/mongo/migrations/migration_113.go b/api/store/mongo/migrations/migration_113.go new file mode 100644 index 00000000000..061bc5b85cd --- /dev/null +++ b/api/store/mongo/migrations/migration_113.go @@ -0,0 +1,37 @@ +package migrations + +import ( + "context" + + "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration113 = migrate.Migration{ + Version: 113, + Description: "Add tls structure with enabled, verify, 
and domain fields to web_endpoints collection", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 113, + "action": "Up", + }).Info("Applying migration up") + + _, err := db.Collection("tunnels").UpdateMany(ctx, bson.M{}, bson.M{"$set": bson.M{"tls": bson.M{"enabled": false, "verify": false, "domain": ""}}}) + + return err + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 113, + "action": "Down", + }).Info("Reverting migration down") + + _, err := db.Collection("tunnels").UpdateMany(ctx, bson.M{}, bson.M{"$unset": bson.M{"tls": ""}}) + + return err + }), +} diff --git a/api/store/mongo/migrations/migration_113_test.go b/api/store/mongo/migrations/migration_113_test.go new file mode 100644 index 00000000000..6464c1b6152 --- /dev/null +++ b/api/store/mongo/migrations/migration_113_test.go @@ -0,0 +1,134 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration113Up(t *testing.T) { + ctx := context.Background() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "succeeds adding tls structure with enabled, verify, and domain fields to all web_endpoints", + setup: func() error { + endpoints := []bson.M{ + {"address": "endpoint1", "host": "192.168.1.1", "port": 8080}, + {"address": "endpoint2", "host": "192.168.1.2", "port": 8081}, + } + _, err := c.Database("test").Collection("tunnels").InsertMany(ctx, []any{endpoints[0], endpoints[1]}) + + return err + }, + verify: func(tt *testing.T) { + cursor, err := c.Database("test").Collection("tunnels").Find(ctx, bson.M{}) + require.NoError(tt, err) + + var 
results []bson.M + require.NoError(tt, cursor.All(ctx, &results)) + + for _, endpoint := range results { + tls, tlsExists := endpoint["tls"] + assert.True(tt, tlsExists) + + tlsObj, ok := tls.(bson.M) + require.True(tt, ok) + + enabled, enabledExists := tlsObj["enabled"] + assert.True(tt, enabledExists) + assert.Equal(tt, false, enabled) + + verify, verifyExists := tlsObj["verify"] + assert.True(tt, verifyExists) + assert.Equal(tt, false, verify) + + domain, domainExists := tlsObj["domain"] + assert.True(tt, domainExists) + assert.Equal(tt, "", domain) + } + }, + }, + { + description: "succeeds when web_endpoints collection is empty", + setup: func() error { + return nil + }, + verify: func(tt *testing.T) { + cursor, err := c.Database("test").Collection("tunnels").Find(ctx, bson.M{}) + require.NoError(tt, err) + + var results []bson.M + require.NoError(tt, cursor.All(ctx, &results)) + + assert.Equal(tt, 0, len(results)) + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { assert.NoError(tt, srv.Reset()) }) + + require.NoError(tt, tc.setup()) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[112]) // migration113 + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + tc.verify(tt) + }) + } +} + +func TestMigration113Down(t *testing.T) { + ctx := context.Background() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "succeeds removing tls structure from all web_endpoints", + setup: func() error { + endpoints := []bson.M{ + {"address": "endpoint1", "host": "192.168.1.1", "port": 8080, "tls": bson.M{"enabled": true, "verify": true, "domain": "example.com"}}, + {"address": "endpoint2", "host": "192.168.1.2", "port": 8081, "tls": bson.M{"enabled": false, "verify": false, "domain": "test.com"}}, + } + _, err := c.Database("test").Collection("tunnels").InsertMany(ctx, []any{endpoints[0], endpoints[1]}) + + return 
err + }, + verify: func(tt *testing.T) { + cursor, err := c.Database("test").Collection("tunnels").Find(ctx, bson.M{}) + require.NoError(tt, err) + + var results []bson.M + require.NoError(tt, cursor.All(ctx, &results)) + + for _, endpoint := range results { + _, tlsExists := endpoint["tls"] + assert.False(tt, tlsExists) + } + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { assert.NoError(tt, srv.Reset()) }) + + require.NoError(tt, tc.setup()) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[112]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + require.NoError(tt, migrates.Down(ctx, migrate.AllAvailable)) + tc.verify(tt) + }) + } +} diff --git a/api/store/mongo/migrations/migration_114.go b/api/store/mongo/migrations/migration_114.go new file mode 100644 index 00000000000..5c133cc581b --- /dev/null +++ b/api/store/mongo/migrations/migration_114.go @@ -0,0 +1,46 @@ +package migrations + +import ( + "context" + + "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration114 = migrate.Migration{ + Version: 114, + Description: "Add admin field to users collection", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 114, + "action": "Up", + }).Info("Applying migration up: Adding super_admin field to all users") + + _, err := db.Collection("users").UpdateMany( + ctx, + bson.M{}, + bson.M{"$set": bson.M{"admin": false}}, + ) + + return err + }), + + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 114, + "action": "Down", + }).Info("Reverting migration down: Removing super_admin field from all users") + + _, err := db.Collection("users").UpdateMany( 
+ ctx, + bson.M{}, + bson.M{"$unset": bson.M{"admin": ""}}, + ) + + return err + }), +} diff --git a/api/store/mongo/migrations/migration_114_test.go b/api/store/mongo/migrations/migration_114_test.go new file mode 100644 index 00000000000..7e98c40d450 --- /dev/null +++ b/api/store/mongo/migrations/migration_114_test.go @@ -0,0 +1,124 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration114Up(t *testing.T) { + ctx := context.Background() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "succeeds adding admin=false to all users", + setup: func() error { + users := []bson.M{ + {"_id": "u1", "username": "user1", "email": "user1@example.com"}, + {"_id": "u2", "username": "user2", "email": "user2@example.com"}, + } + + _, err := c.Database("test").Collection("users").InsertMany(ctx, []any{users[0], users[1]}) + + return err + }, + + verify: func(tt *testing.T) { + cursor, err := c.Database("test").Collection("users").Find(ctx, bson.M{}) + require.NoError(tt, err) + + var results []bson.M + require.NoError(tt, cursor.All(ctx, &results)) + + for _, user := range results { + val, exists := user["admin"] + assert.True(tt, exists, "admin field should exist") + assert.Equal(tt, false, val, "admin should be false") + } + }, + }, + + { + description: "succeeds when users collection is empty", + setup: func() error { + return nil + }, + verify: func(tt *testing.T) { + count, err := c.Database("test").Collection("users").CountDocuments(ctx, bson.M{}) + require.NoError(tt, err) + + assert.Equal(tt, int64(0), count, "users collection should be empty") + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { assert.NoError(tt, srv.Reset()) }) + require.NoError(tt, 
tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[113]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + + tc.verify(tt) + }) + } +} + +func TestMigration114Down(t *testing.T) { + ctx := context.Background() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "succeeds removing admin field from all users", + setup: func() error { + users := []bson.M{ + {"_id": "u1", "username": "user1", "email": "user1@example.com", "admin": false}, + {"_id": "u2", "username": "user2", "email": "user2@example.com", "admin": true}, + } + + _, err := c.Database("test").Collection("users").InsertMany(ctx, []any{users[0], users[1]}) + + return err + }, + + verify: func(tt *testing.T) { + cursor, err := c.Database("test").Collection("users").Find(ctx, bson.M{}) + require.NoError(tt, err) + + var results []bson.M + require.NoError(tt, cursor.All(ctx, &results)) + + for _, user := range results { + _, exists := user["admin"] + assert.False(tt, exists, "admin field should not exist") + } + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { assert.NoError(tt, srv.Reset()) }) + require.NoError(tt, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[113]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + require.NoError(tt, migrates.Down(ctx, migrate.AllAvailable)) + + tc.verify(tt) + }) + } +} diff --git a/api/store/mongo/migrations/migration_115.go b/api/store/mongo/migrations/migration_115.go new file mode 100644 index 00000000000..4b1bb4f8fb1 --- /dev/null +++ b/api/store/mongo/migrations/migration_115.go @@ -0,0 +1,176 @@ +package migrations + +import ( + "context" + + log "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration115 = 
migrate.Migration{ + Version: 115, + Description: "Migrate user invitations from users collection to user_invitations collection", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{ + "component": "migration", + "version": 115, + "action": "Up", + }).Info("Applying migration up") + + session, err := db.Client().StartSession() + if err != nil { + return err + } + defer session.EndSession(ctx) + + _, err = session.WithTransaction(ctx, func(sCtx mongo.SessionContext) (any, error) { + cursor, err := db.Collection("users").Find(sCtx, bson.M{"status": "invited"}) + if err != nil { + log.WithError(err).Error("Failed to find invited users") + + return nil, err + } + + defer cursor.Close(sCtx) + + invitations := make([]any, 0) + for cursor.Next(sCtx) { + user := make(bson.M) + if err := cursor.Decode(&user); err != nil { + log.WithError(err).Error("Failed to decode user document") + + return nil, err + } + + invitations = append(invitations, bson.M{ + "_id": user["_id"], + "email": user["email"], + "created_at": user["created_at"], + "updated_at": user["created_at"], + "invitations": 1, + "status": "pending", + }) + } + + if err := cursor.Err(); err != nil { + log.WithError(err).Error("Cursor error while iterating invited users") + + return nil, err + } + + if len(invitations) > 0 { + _, err = db.Collection("user_invitations").InsertMany(sCtx, invitations) + if err != nil { + log.WithError(err).Error("Failed to insert invitations") + + return nil, err + } + + log.WithField("count", len(invitations)).Info("Successfully migrated invitations to user_invitations collection") + } else { + log.Info("No invited users found to migrate") + } + + deleteResult, err := db.Collection("users").DeleteMany(sCtx, bson.M{"status": "invited"}) + if err != nil { + log.WithError(err).Error("Failed to delete invited users") + + return nil, err + } + + log.WithField("deleted_count", deleteResult.DeletedCount).Info("Successfully removed 
invited users from users collection") + + return nil, nil + }) + + return err + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{ + "component": "migration", + "version": 115, + "action": "Down", + }).Info("Applying migration down") + + session, err := db.Client().StartSession() + if err != nil { + return err + } + defer session.EndSession(ctx) + + _, err = session.WithTransaction(ctx, func(sCtx mongo.SessionContext) (any, error) { + cursor, err := db.Collection("user_invitations").Find(sCtx, bson.M{}) + if err != nil { + log.WithError(err).Error("Failed to find user invitations") + + return nil, err + } + defer cursor.Close(sCtx) + + var users []any + for cursor.Next(sCtx) { + invitation := make(bson.M) + if err := cursor.Decode(&invitation); err != nil { + log.WithError(err).Error("Failed to decode invitation document") + + return nil, err + } + + users = append(users, bson.M{ + "_id": invitation["_id"], + "email": invitation["email"], + "created_at": invitation["created_at"], + "last_login": nil, + "status": "invited", + "origin": nil, + "external_id": nil, + "max_namespaces": nil, + "name": nil, + "username": nil, + "recovery_email": nil, + "email_marketing": nil, + "password": nil, + "preferences": bson.M{"preferred_namespace": nil, "auth_methods": nil}, + "mfa": bson.M{"enabled": nil, "secret": nil, "recovery_codes": nil}, + "admin": nil, + }) + } + + if err := cursor.Err(); err != nil { + log.WithError(err).Error("Cursor error while iterating invitations") + + return nil, err + } + + if len(users) > 0 { + _, err = db.Collection("users").InsertMany(sCtx, users) + if err != nil { + log.WithError(err).Error("Failed to insert users") + + return nil, err + } + + log.WithField("count", len(users)).Info("Successfully restored invited users to users collection") + } else { + log.Info("No invitations found to revert") + } + + return nil, nil + }) + if err != nil { + return err + } + + if err := 
db.Collection("user_invitations").Drop(ctx); err != nil { + log.WithError(err).Error("Failed to drop user_invitations collection") + + return err + } + + log.Info("Successfully dropped user_invitations collection") + + return nil + }), +} diff --git a/api/store/mongo/migrations/migration_115_test.go b/api/store/mongo/migrations/migration_115_test.go new file mode 100644 index 00000000000..b2c1448d201 --- /dev/null +++ b/api/store/mongo/migrations/migration_115_test.go @@ -0,0 +1,255 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +func TestMigration115Up(t *testing.T) { + ctx := context.Background() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "succeeds migrating invited users to user_invitations collection", + setup: func() error { + users := []bson.M{ + { + "_id": primitive.NewObjectID(), + "email": "invited1@test.com", + "created_at": primitive.NewDateTimeFromTime(primitive.NewObjectID().Timestamp()), + "status": "invited", + "name": nil, + "username": nil, + }, + { + "_id": primitive.NewObjectID(), + "email": "invited2@test.com", + "created_at": primitive.NewDateTimeFromTime(primitive.NewObjectID().Timestamp()), + "status": "invited", + "name": nil, + "username": nil, + }, + { + "_id": primitive.NewObjectID(), + "email": "confirmed@test.com", + "created_at": primitive.NewDateTimeFromTime(primitive.NewObjectID().Timestamp()), + "status": "confirmed", + "name": "John Doe", + "username": "johndoe", + }, + } + + _, err := c.Database("test").Collection("users").InsertMany(ctx, []any{users[0], users[1], users[2]}) + + return err + }, + verify: func(tt *testing.T) { + cursor, err := c.Database("test").Collection("user_invitations").Find(ctx, bson.M{}) + require.NoError(tt, err) + + invitations := make([]bson.M, 
0) + require.NoError(tt, cursor.All(ctx, &invitations)) + require.Equal(tt, 2, len(invitations)) + + for _, invitation := range invitations { + require.NotNil(tt, invitation["_id"]) + require.NotNil(tt, invitation["email"]) + require.NotNil(tt, invitation["created_at"]) + require.NotNil(tt, invitation["updated_at"]) + require.Contains(tt, []string{"invited1@test.com", "invited2@test.com"}, invitation["email"]) + require.Equal(tt, int32(1), invitation["invitations"]) + require.Equal(tt, "pending", invitation["status"]) + + require.Nil(tt, invitation["name"]) + require.Nil(tt, invitation["username"]) + } + + userCursor, err := c.Database("test").Collection("users").Find(ctx, bson.M{"status": "invited"}) + require.NoError(tt, err) + + invitedUsers := make([]bson.M, 0) + require.NoError(tt, userCursor.All(ctx, &invitedUsers)) + require.Equal(tt, 0, len(invitedUsers)) + + confirmedCursor, err := c.Database("test").Collection("users").Find(ctx, bson.M{"status": "confirmed"}) + require.NoError(tt, err) + + confirmedUsers := make([]bson.M, 0) + require.NoError(tt, confirmedCursor.All(ctx, &confirmedUsers)) + require.Equal(tt, 1, len(confirmedUsers)) + require.Equal(tt, "confirmed@test.com", confirmedUsers[0]["email"]) + }, + }, + { + description: "handles empty users collection gracefully", + setup: func() error { + return nil + }, + verify: func(tt *testing.T) { + count, err := c.Database("test").Collection("user_invitations").CountDocuments(ctx, bson.M{}) + require.NoError(tt, err) + require.Equal(tt, int64(0), count) + + userCount, err := c.Database("test").Collection("users").CountDocuments(ctx, bson.M{}) + require.NoError(tt, err) + require.Equal(tt, int64(0), userCount) + }, + }, + { + description: "handles users collection with no invited users", + setup: func() error { + users := []bson.M{ + { + "_id": primitive.NewObjectID(), + "email": "confirmed1@test.com", + "created_at": primitive.NewDateTimeFromTime(primitive.NewObjectID().Timestamp()), + "status": 
"confirmed", + "name": "Jane Doe", + "username": "janedoe", + }, + { + "_id": primitive.NewObjectID(), + "email": "not-confirmed@test.com", + "created_at": primitive.NewDateTimeFromTime(primitive.NewObjectID().Timestamp()), + "status": "not-confirmed", + "name": "Bob Smith", + "username": "bobsmith", + }, + } + + _, err := c.Database("test").Collection("users").InsertMany(ctx, []any{users[0], users[1]}) + + return err + }, + verify: func(tt *testing.T) { + invitationCount, err := c.Database("test").Collection("user_invitations").CountDocuments(ctx, bson.M{}) + require.NoError(tt, err) + require.Equal(tt, int64(0), invitationCount) + + userCount, err := c.Database("test").Collection("users").CountDocuments(ctx, bson.M{}) + require.NoError(tt, err) + require.Equal(tt, int64(2), userCount) + + invitedCount, err := c.Database("test").Collection("users").CountDocuments(ctx, bson.M{"status": "invited"}) + require.NoError(tt, err) + require.Equal(tt, int64(0), invitedCount) + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { require.NoError(tt, srv.Reset()) }) + + require.NoError(tt, tc.setup()) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[114]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + tc.verify(tt) + }) + } +} + +func TestMigration115Down(t *testing.T) { + ctx := context.Background() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "succeeds reverting user_invitations back to users collection with invited status", + setup: func() error { + invitations := []bson.M{ + { + "_id": primitive.NewObjectID(), + "email": "invited1@test.com", + "created_at": primitive.NewDateTimeFromTime(primitive.NewObjectID().Timestamp()), + "updated_at": primitive.NewDateTimeFromTime(primitive.NewObjectID().Timestamp()), + "invitations": 1, + "status": "pending", + }, + { + "_id": primitive.NewObjectID(), + 
"email": "invited2@test.com", + "created_at": primitive.NewDateTimeFromTime(primitive.NewObjectID().Timestamp()), + "updated_at": primitive.NewDateTimeFromTime(primitive.NewObjectID().Timestamp()), + "invitations": 2, + "status": "accepted", + }, + } + + _, err := c.Database("test").Collection("user_invitations").InsertMany(ctx, []any{invitations[0], invitations[1]}) + if err != nil { + return err + } + + _, err = c.Database("test").Collection("user_invitations").CountDocuments(ctx, bson.M{}) + if err != nil { + return err + } + + return nil + }, + verify: func(tt *testing.T) { + cursor, err := c.Database("test").Collection("users").Find(ctx, bson.M{"status": "invited"}) + require.NoError(tt, err) + + users := make([]bson.M, 0) + require.NoError(tt, cursor.All(ctx, &users)) + require.Equal(tt, 2, len(users)) + + for _, user := range users { + require.NotNil(tt, user["_id"]) + require.NotNil(tt, user["email"]) + require.NotNil(tt, user["created_at"]) + require.Contains(tt, []string{"invited1@test.com", "invited2@test.com"}, user["email"]) + require.Equal(tt, "invited", user["status"]) + require.Nil(tt, user["name"]) + require.Nil(tt, user["username"]) + require.Nil(tt, user["last_login"]) + } + + count, err := c.Database("test").Collection("user_invitations").CountDocuments(ctx, bson.M{}) + require.NoError(tt, err) + require.Equal(tt, int64(0), count) + }, + }, + { + description: "handles empty user_invitations collection gracefully", + setup: func() error { + return nil + }, + verify: func(tt *testing.T) { + userCount, err := c.Database("test").Collection("users").CountDocuments(ctx, bson.M{}) + require.NoError(tt, err) + require.Equal(tt, int64(0), userCount) + + invitationCount, err := c.Database("test").Collection("user_invitations").CountDocuments(ctx, bson.M{}) + require.NoError(tt, err) + require.Equal(tt, int64(0), invitationCount) + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { 
require.NoError(tt, srv.Reset()) }) + + require.NoError(tt, tc.setup()) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[114]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + require.NoError(tt, migrates.Down(ctx, migrate.AllAvailable)) + tc.verify(tt) + }) + } +} diff --git a/api/store/mongo/migrations/migration_116.go b/api/store/mongo/migrations/migration_116.go new file mode 100644 index 00000000000..d8ff4e0bc9d --- /dev/null +++ b/api/store/mongo/migrations/migration_116.go @@ -0,0 +1,51 @@ +package migrations + +import ( + "context" + + log "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +var migration116 = migrate.Migration{ + Version: 116, + Description: "Create unique index on email field in user_invitations collection", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{ + "component": "migration", + "version": 116, + "action": "Up", + }).Info("Applying migration up") + + indexModel := mongo.IndexModel{Keys: bson.M{"email": 1}, Options: options.Index().SetName("email").SetUnique(true)} + if _, err := db.Collection("user_invitations").Indexes().CreateOne(ctx, indexModel); err != nil { + log.WithError(err).Error("Failed to create email index on user_invitations collection") + + return err + } + + log.Info("Successfully created unique email index on user_invitations collection") + + return nil + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{ + "component": "migration", + "version": 116, + "action": "Down", + }).Info("Applying migration down") + + if _, err := db.Collection("user_invitations").Indexes().DropOne(ctx, "email"); err != nil { + log.WithError(err).Error("Failed to drop email index from user_invitations collection") + + return err 
+ } + + log.Info("Successfully dropped email index from user_invitations collection") + + return nil + }), +} diff --git a/api/store/mongo/migrations/migration_117.go b/api/store/mongo/migrations/migration_117.go new file mode 100644 index 00000000000..10e133208e0 --- /dev/null +++ b/api/store/mongo/migrations/migration_117.go @@ -0,0 +1,47 @@ +package migrations + +import ( + "context" + + "github.com/shellhub-io/shellhub/pkg/envs" + log "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration117 = migrate.Migration{ + Version: 117, + Description: "Set setup field in system collection for non-cloud environments", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{ + "component": "migration", + "version": 117, + "action": "Up", + }).Info("Applying migration up") + + if envs.IsCloud() { + return nil + } + + usersCount, err := db.Collection("users").CountDocuments(ctx, bson.M{}) + if err != nil { + return err + } + + _, err = db.Collection("system").UpdateOne(ctx, bson.M{}, bson.M{"$set": bson.M{"setup": usersCount > 0}}) + + return err + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{ + "component": "migration", + "version": 117, + "action": "Down", + }).Info("Applying migration down") + + log.Info("Unable to undo setup field changes") + + return nil + }), +} diff --git a/api/store/mongo/migrations/migration_118.go b/api/store/mongo/migrations/migration_118.go new file mode 100644 index 00000000000..695f554ed9b --- /dev/null +++ b/api/store/mongo/migrations/migration_118.go @@ -0,0 +1,129 @@ +package migrations + +import ( + "context" + + log "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration118 = 
migrate.Migration{ + Version: 118, + Description: "Migrate member invitations from namespaces members array to membership_invitations collection", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{ + "component": "migration", + "version": 118, + "action": "Up", + }).Info("Applying migration up") + + session, err := db.Client().StartSession() + if err != nil { + return err + } + defer session.EndSession(ctx) + + _, err = session.WithTransaction(ctx, func(sCtx mongo.SessionContext) (any, error) { + cursor, err := db.Collection("namespaces").Find(sCtx, bson.M{}) + if err != nil { + log.WithError(err).Error("Failed to find namespaces") + + return nil, err + } + + defer cursor.Close(sCtx) + + invitations := make([]any, 0) + namespacesToUpdate := make([]bson.M, 0) + + for cursor.Next(sCtx) { + namespace := make(bson.M) + if err := cursor.Decode(&namespace); err != nil { + log.WithError(err).Error("Failed to decode namespace document") + + return nil, err + } + + if members, ok := namespace["members"].(bson.A); ok { + updatedMembers := make(bson.A, 0) + for _, m := range members { + if member, ok := m.(bson.M); ok { + if member["role"] != "owner" { + invitations = append( + invitations, + bson.M{ + "tenant_id": namespace["tenant_id"], + "user_id": member["id"], + "invited_by": namespace["owner"], + "role": member["role"], + "status": member["status"], + "created_at": member["added_at"], + "updated_at": member["added_at"], + "status_updated_at": member["added_at"], + "expires_at": member["expires_at"], + "invitations": 1, + }, + ) + } + + if member["status"] == "accepted" { + member := bson.M{"id": member["id"], "added_at": member["added_at"], "role": member["role"]} + updatedMembers = append(updatedMembers, member) + } + } + } + + namespace["members"] = updatedMembers + namespacesToUpdate = append(namespacesToUpdate, namespace) + } + } + + if err := cursor.Err(); err != nil { + log.WithError(err).Error("Cursor 
error while iterating namespaces") + + return nil, err + } + + if len(invitations) > 0 { + if _, err = db.Collection("membership_invitations").InsertMany(sCtx, invitations); err != nil { + log.WithError(err).Error("Failed to insert membership invitations") + + return nil, err + } + + log.WithField("count", len(invitations)).Info("Successfully migrated member invitations to membership_invitations collection") + } else { + log.Info("No member invitations found to migrate") + } + + for _, ns := range namespacesToUpdate { + nsID := ns["_id"] + if _, err = db.Collection("namespaces").ReplaceOne(sCtx, bson.M{"_id": nsID}, ns); err != nil { + log.WithError(err).Error("Failed to update namespace") + + return nil, err + } + } + + if len(namespacesToUpdate) > 0 { + log.WithField("count", len(namespacesToUpdate)).Info("Successfully updated namespaces with cleaned members") + } + + return nil, nil + }) + + return err + }), + + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{ + "component": "migration", + "version": 118, + "action": "Down", + }).Warning("Migration down is not implemented - this migration cannot be reversed safely") + + return nil + }), +} diff --git a/api/store/mongo/migrations/migration_118_test.go b/api/store/mongo/migrations/migration_118_test.go new file mode 100644 index 00000000000..0e2b9da81e1 --- /dev/null +++ b/api/store/mongo/migrations/migration_118_test.go @@ -0,0 +1,169 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +func TestMigration118Up(t *testing.T) { + ctx := context.Background() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "succeeds migrating namespace members to membership_invitations collection", + setup: func() error { 
+ ownerID := primitive.NewObjectID() + memberID1 := primitive.NewObjectID() + memberID2 := primitive.NewObjectID() + + namespaces := []bson.M{ + { + "_id": primitive.NewObjectID(), + "name": "test-namespace-1", + "owner": ownerID, + "tenant_id": "tenant-1", + "members": bson.A{ + bson.M{ + "id": ownerID, + "added_at": primitive.NewDateTimeFromTime(primitive.NewObjectID().Timestamp()), + "role": "owner", + "status": "accepted", + "expires_at": nil, + }, + bson.M{ + "id": memberID1, + "added_at": primitive.NewDateTimeFromTime(primitive.NewObjectID().Timestamp()), + "role": "observer", + "status": "pending", + "expires_at": primitive.NewDateTimeFromTime(primitive.NewObjectID().Timestamp().Add(7 * 24 * 60 * 60 * 1000)), + }, + bson.M{ + "id": memberID2, + "added_at": primitive.NewDateTimeFromTime(primitive.NewObjectID().Timestamp()), + "role": "administrator", + "status": "accepted", + "expires_at": nil, + }, + }, + }, + } + + _, err := c.Database("test").Collection("namespaces").InsertMany(ctx, []any{namespaces[0]}) + + return err + }, + verify: func(tt *testing.T) { + cursor, err := c.Database("test").Collection("membership_invitations").Find(ctx, bson.M{}) + require.NoError(tt, err) + + invitations := make([]bson.M, 0) + require.NoError(tt, cursor.All(ctx, &invitations)) + require.Equal(tt, 2, len(invitations)) + + ownerFound := false + for _, invitation := range invitations { + require.NotNil(tt, invitation["_id"]) + require.Equal(tt, "tenant-1", invitation["tenant_id"]) + require.NotNil(tt, invitation["user_id"]) + require.NotNil(tt, invitation["invited_by"]) + require.NotNil(tt, invitation["role"]) + require.NotNil(tt, invitation["status"]) + require.NotNil(tt, invitation["created_at"]) + require.NotNil(tt, invitation["updated_at"]) + require.NotNil(tt, invitation["status_updated_at"]) + require.Equal(tt, int32(1), invitation["invitations"]) + + require.NotEqual(tt, "owner", invitation["role"]) + if invitation["role"] == "owner" { + ownerFound = true + } + } + 
require.False(tt, ownerFound, "Owner should not have an invitation created") + + namespaceCursor, err := c.Database("test").Collection("namespaces").Find(ctx, bson.M{"tenant_id": "tenant-1"}) + require.NoError(tt, err) + + namespaces := make([]bson.M, 0) + require.NoError(tt, namespaceCursor.All(ctx, &namespaces)) + require.Equal(tt, 1, len(namespaces)) + + namespace := namespaces[0] + members, ok := namespace["members"].(bson.A) + require.True(tt, ok) + require.Equal(tt, 2, len(members)) + + for _, m := range members { + member, ok := m.(bson.M) + require.True(tt, ok) + require.NotNil(tt, member["id"]) + require.NotNil(tt, member["added_at"]) + require.NotNil(tt, member["role"]) + require.Nil(tt, member["status"]) + require.Nil(tt, member["expires_at"]) + } + }, + }, + { + description: "handles namespace with no members gracefully", + setup: func() error { + namespaces := []bson.M{ + { + "_id": primitive.NewObjectID(), + "name": "empty-namespace", + "owner": primitive.NewObjectID(), + "tenant_id": "tenant-empty", + "members": bson.A{}, + }, + } + + _, err := c.Database("test").Collection("namespaces").InsertMany(ctx, []any{namespaces[0]}) + + return err + }, + verify: func(tt *testing.T) { + count, err := c.Database("test").Collection("membership_invitations").CountDocuments(ctx, bson.M{}) + require.NoError(tt, err) + require.Equal(tt, int64(0), count) + + namespaceCount, err := c.Database("test").Collection("namespaces").CountDocuments(ctx, bson.M{"tenant_id": "tenant-empty"}) + require.NoError(tt, err) + require.Equal(tt, int64(1), namespaceCount) + }, + }, + { + description: "handles empty namespaces collection gracefully", + setup: func() error { + return nil + }, + verify: func(tt *testing.T) { + count, err := c.Database("test").Collection("membership_invitations").CountDocuments(ctx, bson.M{}) + require.NoError(tt, err) + require.Equal(tt, int64(0), count) + + namespaceCount, err := c.Database("test").Collection("namespaces").CountDocuments(ctx, bson.M{}) + 
require.NoError(tt, err) + require.Equal(tt, int64(0), namespaceCount) + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { require.NoError(tt, srv.Reset()) }) + + require.NoError(tt, tc.setup()) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[117]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + tc.verify(tt) + }) + } +} diff --git a/api/store/mongo/migrations/migration_119.go b/api/store/mongo/migrations/migration_119.go new file mode 100644 index 00000000000..8483c2a7b5a --- /dev/null +++ b/api/store/mongo/migrations/migration_119.go @@ -0,0 +1,95 @@ +package migrations + +import ( + "context" + + log "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +var migration119 = migrate.Migration{ + Version: 119, + Description: "Create indexes on membership_invitations collection", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{ + "component": "migration", + "version": 119, + "action": "Up", + }).Info("Applying migration up") + + indexes := []struct { + name string + model mongo.IndexModel + }{ + { + name: "tenant_user_status_pending_unique", + model: mongo.IndexModel{ + Keys: bson.D{ + {Key: "tenant_id", Value: 1}, + {Key: "user_id", Value: 1}, + {Key: "status", Value: 1}, + }, + Options: options.Index(). + SetName("tenant_user_status_pending_unique"). + SetUnique(true). 
+ SetPartialFilterExpression(bson.M{"status": "pending"}), + }, + }, + { + name: "tenant_user_created_at", + model: mongo.IndexModel{ + Keys: bson.D{ + {Key: "tenant_id", Value: 1}, + {Key: "user_id", Value: 1}, + }, + Options: options.Index().SetName("tenant_user_created_at"), + }, + }, + { + name: "user_status", + model: mongo.IndexModel{ + Keys: bson.D{ + {Key: "user_id", Value: 1}, + {Key: "status", Value: 1}, + }, + Options: options.Index().SetName("user_status"), + }, + }, + } + + for _, ix := range indexes { + if _, err := db.Collection("membership_invitations").Indexes().CreateOne(ctx, ix.model); err != nil { + log.WithError(err).WithField("index", ix.name).Error("Failed to create index") + + return err + } + } + + log.Info("Successfully created indexes on membership_invitations collection") + + return nil + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{ + "component": "migration", + "version": 119, + "action": "Down", + }).Info("Applying migration down") + + indexes := []string{"tenant_user_status_pending_unique", "tenant_user_created_at", "user_status"} + for _, ix := range indexes { + if _, err := db.Collection("membership_invitations").Indexes().DropOne(ctx, ix); err != nil { + log.WithError(err).WithField("index", ix).Error("Failed to drop index") + + return err + } + } + + log.Info("Successfully dropped indexes from membership_invitations collection") + + return nil + }), +} diff --git a/api/store/mongo/migrations/migration_11_test.go b/api/store/mongo/migrations/migration_11_test.go index 47c8fb6facb..8e9a86db2fd 100644 --- a/api/store/mongo/migrations/migration_11_test.go +++ b/api/store/mongo/migrations/migration_11_test.go @@ -4,10 +4,8 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/clock" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" 
migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" @@ -15,23 +13,22 @@ import ( ) func TestMigration11(t *testing.T) { - logrus.Info("Testing Migration 11 - Test if the private_keys has ttl system") + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) - db := dbtest.DBServer{} - defer db.Stop() - - migrates := migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:11]...) - err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:11]...) + err := migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) pk := models.PrivateKey{ CreatedAt: clock.Now(), } - _, err = db.Client().Database("test").Collection("private_keys").InsertOne(context.TODO(), pk) + _, err = c.Database("test").Collection("private_keys").InsertOne(context.TODO(), pk) assert.NoError(t, err) - index := db.Client().Database("test").Collection("private_keys").Indexes() + index := c.Database("test").Collection("private_keys").Indexes() cursor, err := index.List(context.TODO()) assert.NoError(t, err) diff --git a/api/store/mongo/migrations/migration_12.go b/api/store/mongo/migrations/migration_12.go index b5e14b1d065..945702eaf7b 100644 --- a/api/store/mongo/migrations/migration_12.go +++ b/api/store/mongo/migrations/migration_12.go @@ -13,7 +13,7 @@ import ( var migration12 = migrate.Migration{ Version: 12, Description: "Set the tenant_id as unique in the namespaces collection", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 12, @@ -23,25 +23,25 @@ var migration12 = migrate.Migration{ Keys: bson.D{{"tenant_id", 1}}, Options: options.Index().SetName("tenant_id").SetUnique(true), } - if _, err := db.Collection("namespaces").Indexes().CreateOne(context.TODO(), mod); err != nil { + if _, err := 
db.Collection("namespaces").Indexes().CreateOne(ctx, mod); err != nil { return err } mod = mongo.IndexModel{ Keys: bson.D{{"name", 1}}, Options: options.Index().SetName("name").SetUnique(true), } - _, err := db.Collection("namespaces").Indexes().CreateOne(context.TODO(), mod) + _, err := db.Collection("namespaces").Indexes().CreateOne(ctx, mod) return err - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 12, "action": "Down", }).Info("Applying migration") - _, err := db.Collection("namespaces").Indexes().DropOne(context.TODO(), "tenant_id") + _, err := db.Collection("namespaces").Indexes().DropOne(ctx, "tenant_id") return err - }, + }), } diff --git a/api/store/mongo/migrations/migration_12_test.go b/api/store/mongo/migrations/migration_12_test.go index fb1f80bae90..df236d59f74 100644 --- a/api/store/mongo/migrations/migration_12_test.go +++ b/api/store/mongo/migrations/migration_12_test.go @@ -4,33 +4,30 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" ) func TestMigration12(t *testing.T) { - logrus.Info("Testing Migration 12 - Test if the tenant_id is set unique") - - db := dbtest.DBServer{} - defer db.Stop() + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) ns1 := models.Namespace{Name: "name", TenantID: "1"} ns2 := models.Namespace{Name: "name", TenantID: "1"} - _, err := db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), ns1) + _, err := c.Database("test").Collection("namespaces").InsertOne(context.TODO(), ns1) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), ns2) + _, err = 
c.Database("test").Collection("namespaces").InsertOne(context.TODO(), ns2) assert.NoError(t, err) - migrates := migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:11]...) - err = migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:11]...) + err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - migrates = migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:12]...) - err = migrates.Up(migrate.AllAvailable) + migrates = migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:12]...) + err = migrates.Up(context.Background(), migrate.AllAvailable) assert.Error(t, err) } diff --git a/api/store/mongo/migrations/migration_13.go b/api/store/mongo/migrations/migration_13.go index 1345bb4efc5..a3836d0b993 100644 --- a/api/store/mongo/migrations/migration_13.go +++ b/api/store/mongo/migrations/migration_13.go @@ -13,7 +13,7 @@ import ( var migration13 = migrate.Migration{ Version: 13, Description: "Change on several collections", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 13, @@ -23,7 +23,7 @@ var migration13 = migrate.Migration{ Keys: bson.D{{"uid", 1}}, Options: options.Index().SetName("uid").SetUnique(true), } - _, err := db.Collection("devices").Indexes().CreateOne(context.TODO(), mod) + _, err := db.Collection("devices").Indexes().CreateOne(ctx, mod) if err != nil { return err } @@ -32,7 +32,7 @@ var migration13 = migrate.Migration{ Keys: bson.D{{"last_seen", 1}}, Options: options.Index().SetName("last_seen").SetExpireAfterSeconds(30), } - _, err = db.Collection("connected_devices").Indexes().CreateOne(context.TODO(), mod) + _, err = db.Collection("connected_devices").Indexes().CreateOne(ctx, mod) if err != nil { return err } @@ -41,7 +41,7 @@ var migration13 = migrate.Migration{ Keys: 
bson.D{{"uid", 1}}, Options: options.Index().SetName("uid").SetUnique(false), } - _, err = db.Collection("connected_devices").Indexes().CreateOne(context.TODO(), mod) + _, err = db.Collection("connected_devices").Indexes().CreateOne(ctx, mod) if err != nil { return err } @@ -50,7 +50,7 @@ var migration13 = migrate.Migration{ Keys: bson.D{{"uid", 1}}, Options: options.Index().SetName("uid").SetUnique(true), } - _, err = db.Collection("sessions").Indexes().CreateOne(context.TODO(), mod) + _, err = db.Collection("sessions").Indexes().CreateOne(ctx, mod) if err != nil { return err } @@ -59,7 +59,7 @@ var migration13 = migrate.Migration{ Keys: bson.D{{"last_seen", 1}}, Options: options.Index().SetName("last_seen").SetExpireAfterSeconds(30), } - _, err = db.Collection("active_sessions").Indexes().CreateOne(context.TODO(), mod) + _, err = db.Collection("active_sessions").Indexes().CreateOne(ctx, mod) if err != nil { return err } @@ -68,7 +68,7 @@ var migration13 = migrate.Migration{ Keys: bson.D{{"uid", 1}}, Options: options.Index().SetName("uid").SetUnique(false), } - _, err = db.Collection("active_sessions").Indexes().CreateOne(context.TODO(), mod) + _, err = db.Collection("active_sessions").Indexes().CreateOne(ctx, mod) if err != nil { return err } @@ -77,7 +77,7 @@ var migration13 = migrate.Migration{ Keys: bson.D{{"username", 1}}, Options: options.Index().SetName("username").SetUnique(true), } - _, err = db.Collection("users").Indexes().CreateOne(context.TODO(), mod) + _, err = db.Collection("users").Indexes().CreateOne(ctx, mod) if err != nil { return err } @@ -86,39 +86,39 @@ var migration13 = migrate.Migration{ Keys: bson.D{{"tenant_id", 1}}, Options: options.Index().SetName("tenant_id").SetUnique(true), } - _, err = db.Collection("users").Indexes().CreateOne(context.TODO(), mod) + _, err = db.Collection("users").Indexes().CreateOne(ctx, mod) if err != nil { return err } return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: 
migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 13, "action": "Down", }).Info("Applying migration") - if _, err := db.Collection("devices").Indexes().DropOne(context.TODO(), "uid"); err != nil { + if _, err := db.Collection("devices").Indexes().DropOne(ctx, "uid"); err != nil { return err } - if _, err := db.Collection("connected_devices").Indexes().DropOne(context.TODO(), "last_seen"); err != nil { + if _, err := db.Collection("connected_devices").Indexes().DropOne(ctx, "last_seen"); err != nil { return err } - if _, err := db.Collection("connected_devices").Indexes().DropOne(context.TODO(), "uid"); err != nil { + if _, err := db.Collection("connected_devices").Indexes().DropOne(ctx, "uid"); err != nil { return err } - if _, err := db.Collection("sessions").Indexes().DropOne(context.TODO(), "uid"); err != nil { + if _, err := db.Collection("sessions").Indexes().DropOne(ctx, "uid"); err != nil { return err } - if _, err := db.Collection("active_sessions").Indexes().DropOne(context.TODO(), "last_seen"); err != nil { + if _, err := db.Collection("active_sessions").Indexes().DropOne(ctx, "last_seen"); err != nil { return err } - if _, err := db.Collection("users").Indexes().DropOne(context.TODO(), "username"); err != nil { + if _, err := db.Collection("users").Indexes().DropOne(ctx, "username"); err != nil { return err } - _, err := db.Collection("users").Indexes().DropOne(context.TODO(), "tenant_id") + _, err := db.Collection("users").Indexes().DropOne(ctx, "tenant_id") return err - }, + }), } diff --git a/api/store/mongo/migrations/migration_13_test.go b/api/store/mongo/migrations/migration_13_test.go index a7e0432470a..13736552ec1 100644 --- a/api/store/mongo/migrations/migration_13_test.go +++ b/api/store/mongo/migrations/migration_13_test.go @@ -3,8 +3,8 @@ package migrations import ( "context" "testing" + "time" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" 
"github.com/shellhub-io/shellhub/pkg/models" "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" @@ -12,10 +12,15 @@ import ( ) func TestMigration13(t *testing.T) { - logrus.Info("Testing Migration 13 - Test the several changes on the collections") + type ConnectedDevice struct { + UID string `json:"uid"` + TenantID string `json:"tenant_id" bson:"tenant_id"` + LastSeen time.Time `json:"last_seen" bson:"last_seen"` + } - db := dbtest.DBServer{} - defer db.Stop() + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) logrus.Info("Test if the UID is unique in the devices collection") @@ -27,30 +32,30 @@ func TestMigration13(t *testing.T) { UID: "1", } - _, err := db.Client().Database("test").Collection("devices").InsertOne(context.TODO(), device1) + _, err := c.Database("test").Collection("devices").InsertOne(context.TODO(), device1) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("devices").InsertOne(context.TODO(), device2) + _, err = c.Database("test").Collection("devices").InsertOne(context.TODO(), device2) assert.NoError(t, err) - migrates := migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:13]...) - err = migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:13]...) 
+ err = migrates.Up(context.Background(), migrate.AllAvailable) assert.Error(t, err) logrus.Info("Test if the uid in the connected_devices collection is not unique") - connectedDevice1 := models.ConnectedDevice{ + connectedDevice1 := ConnectedDevice{ UID: "1", } - connectedDevice2 := models.ConnectedDevice{ + connectedDevice2 := ConnectedDevice{ UID: "1", } - _, err = db.Client().Database("test").Collection("connected_devices").InsertOne(context.TODO(), connectedDevice1) + _, err = c.Database("test").Collection("connected_devices").InsertOne(context.TODO(), connectedDevice1) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("connected_devices").InsertOne(context.TODO(), connectedDevice2) + _, err = c.Database("test").Collection("connected_devices").InsertOne(context.TODO(), connectedDevice2) assert.NoError(t, err) logrus.Info("Test if the uid in the sessions collection is unique") @@ -63,10 +68,10 @@ func TestMigration13(t *testing.T) { UID: "1", } - _, err = db.Client().Database("test").Collection("sessions").InsertOne(context.TODO(), session1) + _, err = c.Database("test").Collection("sessions").InsertOne(context.TODO(), session1) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("sessions").InsertOne(context.TODO(), session2) + _, err = c.Database("test").Collection("sessions").InsertOne(context.TODO(), session2) assert.NoError(t, err) activeSession1 := models.ActiveSession{ @@ -77,10 +82,10 @@ func TestMigration13(t *testing.T) { UID: "1", } - _, err = db.Client().Database("test").Collection("active_sessions").InsertOne(context.TODO(), activeSession1) + _, err = c.Database("test").Collection("active_sessions").InsertOne(context.TODO(), activeSession1) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("active_sessions").InsertOne(context.TODO(), activeSession2) + _, err = c.Database("test").Collection("active_sessions").InsertOne(context.TODO(), activeSession2) assert.NoError(t, err) 
logrus.Info("Test if the tenant_id in the users collection is unique") @@ -103,9 +108,9 @@ func TestMigration13(t *testing.T) { Email: "test2", } - _, err = db.Client().Database("test").Collection("users").InsertOne(context.TODO(), user1) + _, err = c.Database("test").Collection("users").InsertOne(context.TODO(), user1) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("users").InsertOne(context.TODO(), user2) + _, err = c.Database("test").Collection("users").InsertOne(context.TODO(), user2) assert.NoError(t, err) } diff --git a/api/store/mongo/migrations/migration_14.go b/api/store/mongo/migrations/migration_14.go index 628095ba193..58f2e29b7ff 100644 --- a/api/store/mongo/migrations/migration_14.go +++ b/api/store/mongo/migrations/migration_14.go @@ -14,7 +14,7 @@ import ( var migration14 = migrate.Migration{ Version: 14, Description: "Set the right tenant_id in the users collection", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 14, @@ -26,19 +26,19 @@ var migration14 = migrate.Migration{ ID string `json:"id,omitempty" bson:"_id,omitempty"` SessionRecord bool `json:"session_record" bson:"session_record,omitempty"` } - if _, err := db.Collection("users").Indexes().DropOne(context.TODO(), "tenant_id"); err != nil { + if _, err := db.Collection("users").Indexes().DropOne(ctx, "tenant_id"); err != nil { return err } - if _, err := db.Collection("users").Indexes().DropOne(context.TODO(), "session_record"); err != nil { + if _, err := db.Collection("users").Indexes().DropOne(ctx, "session_record"); err != nil { return err } - cursor, err := db.Collection("users").Find(context.TODO(), bson.D{}) + cursor, err := db.Collection("users").Find(ctx, bson.D{}) if err != nil { return err } - defer cursor.Close(context.TODO()) - for cursor.Next(context.TODO()) { + defer cursor.Close(ctx) + for cursor.Next(ctx) { 
user := new(user) err := cursor.Decode(&user) if err != nil { @@ -74,45 +74,45 @@ var migration14 = migrate.Migration{ Settings: &settings, } - _, err = db.Collection("namespaces").InsertOne(context.TODO(), &namespace) + _, err = db.Collection("namespaces").InsertOne(ctx, &namespace) if err != nil { return nil } - if _, err := db.Collection("users").UpdateOne(context.TODO(), bson.M{"tenant_id": user.TenantID}, bson.M{"$unset": bson.M{"tenant_id": ""}}); err != nil { + if _, err := db.Collection("users").UpdateOne(ctx, bson.M{"tenant_id": user.TenantID}, bson.M{"$unset": bson.M{"tenant_id": ""}}); err != nil { return err } - if _, err := db.Collection("users").UpdateOne(context.TODO(), bson.M{"tenant_id": user.TenantID}, bson.M{"$unset": bson.M{"session_record": ""}}); err != nil { + if _, err := db.Collection("users").UpdateOne(ctx, bson.M{"tenant_id": user.TenantID}, bson.M{"$unset": bson.M{"session_record": ""}}); err != nil { return err } } return cursor.Err() - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 14, "action": "Down", }).Info("Applying migration") - cursor, err := db.Collection("namespaces").Find(context.TODO(), bson.D{}) + cursor, err := db.Collection("namespaces").Find(ctx, bson.D{}) if err != nil { return err } - defer cursor.Close(context.TODO()) - for cursor.Next(context.TODO()) { + defer cursor.Close(ctx) + for cursor.Next(ctx) { namespace := new(models.Namespace) err := cursor.Decode(&namespace) if err != nil { return err } - _, err = db.Collection("users").UpdateOne(context.TODO(), bson.M{"_id": namespace.Owner}, bson.M{"$set": bson.M{"tenant": namespace.TenantID}}) + _, err = db.Collection("users").UpdateOne(ctx, bson.M{"_id": namespace.Owner}, bson.M{"$set": bson.M{"tenant": namespace.TenantID}}) if err != nil { return err } } return err - }, + }), } diff --git 
a/api/store/mongo/migrations/migration_14_test.go b/api/store/mongo/migrations/migration_14_test.go index f7d826e405e..012a82c272e 100644 --- a/api/store/mongo/migrations/migration_14_test.go +++ b/api/store/mongo/migrations/migration_14_test.go @@ -5,18 +5,15 @@ import ( "testing" "time" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration14(t *testing.T) { - logrus.Info("Testing Migration 14 - Test if the right tenant_id is set") - - db := dbtest.DBServer{} - defer db.Stop() + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) type user struct { Username string `json:"username" bson:",omitempty"` @@ -52,16 +49,16 @@ func TestMigration14(t *testing.T) { TenantID: "1", } - _, err := db.Client().Database("test").Collection("users").InsertOne(context.TODO(), user1) + _, err := c.Database("test").Collection("users").InsertOne(context.TODO(), user1) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), ns) + _, err = c.Database("test").Collection("namespaces").InsertOne(context.TODO(), ns) assert.NoError(t, err) - migrates := migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:14]...) - err = migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:14]...) 
+ err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - err = db.Client().Database("test").Collection("users").FindOne(context.TODO(), bson.M{"tenant_id": "1"}).Decode(&user1) + err = c.Database("test").Collection("users").FindOne(context.TODO(), bson.M{"tenant_id": "1"}).Decode(&user1) assert.NoError(t, err) } diff --git a/api/store/mongo/migrations/migration_15.go b/api/store/mongo/migrations/migration_15.go index c18f846cfa6..4d28469266d 100644 --- a/api/store/mongo/migrations/migration_15.go +++ b/api/store/mongo/migrations/migration_15.go @@ -12,13 +12,13 @@ import ( var migration15 = migrate.Migration{ Version: 15, Description: "Set all names to lowercase in the namespaces", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 15, "action": "Up", }).Info("Applying migration") - _, err := db.Collection("namespaces").UpdateMany(context.TODO(), bson.M{}, []bson.M{ + _, err := db.Collection("namespaces").UpdateMany(ctx, bson.M{}, []bson.M{ { "$set": bson.M{ "name": bson.M{"$toLower": "$name"}, @@ -27,8 +27,8 @@ var migration15 = migrate.Migration{ }) return err - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 15, @@ -36,5 +36,5 @@ var migration15 = migrate.Migration{ }).Info("Applying migration") return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_15_test.go b/api/store/mongo/migrations/migration_15_test.go index 4cae6991111..4d80c08f937 100644 --- a/api/store/mongo/migrations/migration_15_test.go +++ b/api/store/mongo/migrations/migration_15_test.go @@ -4,35 +4,32 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" 
"github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration15(t *testing.T) { - logrus.Info("Testing Migration 15 - Test if the name is in lowercase") + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) - db := dbtest.DBServer{} - defer db.Stop() - - migrates := migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:14]...) - err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:14]...) + err := migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) ns := models.Namespace{ Name: "Test", } - _, err = db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), ns) + _, err = c.Database("test").Collection("namespaces").InsertOne(context.TODO(), ns) assert.NoError(t, err) - migrates = migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:15]...) - err = migrates.Up(migrate.AllAvailable) + migrates = migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:15]...) 
+ err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - err = db.Client().Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{"name": "test"}).Decode(&ns) + err = c.Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{"name": "test"}).Decode(&ns) assert.NoError(t, err) } diff --git a/api/store/mongo/migrations/migration_16.go b/api/store/mongo/migrations/migration_16.go index 87c541f6037..bfe5997facf 100644 --- a/api/store/mongo/migrations/migration_16.go +++ b/api/store/mongo/migrations/migration_16.go @@ -13,7 +13,7 @@ import ( var migration16 = migrate.Migration{ Version: 16, Description: "Set the fingerprint as unique on public_keys collection", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 16, @@ -23,19 +23,18 @@ var migration16 = migrate.Migration{ Keys: bson.D{{"fingerprint", 1}}, Options: options.Index().SetName("fingerprint").SetUnique(true), } - _, err := db.Collection("public_keys").Indexes().CreateOne(context.TODO(), mod) + _, err := db.Collection("public_keys").Indexes().CreateOne(ctx, mod) return err - }, - - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 16, "action": "Down", }).Info("Applying migration") - _, err := db.Collection("public_keys").Indexes().DropOne(context.TODO(), "fingerprint") + _, err := db.Collection("public_keys").Indexes().DropOne(ctx, "fingerprint") return err - }, + }), } diff --git a/api/store/mongo/migrations/migration_16_test.go b/api/store/mongo/migrations/migration_16_test.go index 09b42905ddb..0ea40b8d8c0 100644 --- a/api/store/mongo/migrations/migration_16_test.go +++ b/api/store/mongo/migrations/migration_16_test.go @@ -4,33 +4,30 @@ import ( "context" 
"testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" ) func TestMigration16(t *testing.T) { - logrus.Info("Testing Migration 16 - Test if the fingerprint is set unique") - - db := dbtest.DBServer{} - defer db.Stop() + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) pk1 := models.PublicKey{Fingerprint: "test"} pk2 := models.PublicKey{Fingerprint: "test"} - _, err := db.Client().Database("test").Collection("public_keys").InsertOne(context.TODO(), pk1) + _, err := c.Database("test").Collection("public_keys").InsertOne(context.TODO(), pk1) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("public_keys").InsertOne(context.TODO(), pk2) + _, err = c.Database("test").Collection("public_keys").InsertOne(context.TODO(), pk2) assert.NoError(t, err) - migrates := migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:15]...) - err = migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:15]...) + err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - migrates = migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:16]...) - err = migrates.Up(migrate.AllAvailable) + migrates = migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:16]...) 
+ err = migrates.Up(context.Background(), migrate.AllAvailable) assert.Error(t, err) } diff --git a/api/store/mongo/migrations/migration_17.go b/api/store/mongo/migrations/migration_17.go index f1ebce798f0..b7c7aae4df0 100644 --- a/api/store/mongo/migrations/migration_17.go +++ b/api/store/mongo/migrations/migration_17.go @@ -15,13 +15,13 @@ import ( var migration17 = migrate.Migration{ Version: 17, Description: "Remove the namespaces, devices, session, connected_devices, firewall_rules and public_keys in the users", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 17, "action": "Up", }).Info("Applying migration") - cursor, err := db.Collection("namespaces").Find(context.TODO(), bson.D{}) + cursor, err := db.Collection("namespaces").Find(ctx, bson.D{}) if err != nil { return err } @@ -43,7 +43,7 @@ var migration17 = migrate.Migration{ CreatedAt time.Time `json:"created_at" bson:"created_at"` } - for cursor.Next(context.TODO()) { + for cursor.Next(ctx) { namespace := Namespace{} err = cursor.Decode(&namespace) @@ -57,11 +57,11 @@ var migration17 = migrate.Migration{ } user := new(models.User) - if err := db.Collection("users").FindOne(context.TODO(), bson.M{"_id": objID}).Decode(&user); err != nil { + if err := db.Collection("users").FindOne(ctx, bson.M{"_id": objID}).Decode(&user); err != nil { if err != mongo.ErrNoDocuments { return err } - if _, err := db.Collection("namespaces").DeleteOne(context.TODO(), bson.M{"tenant_id": namespace.TenantID}); err != nil { + if _, err := db.Collection("namespaces").DeleteOne(ctx, bson.M{"tenant_id": namespace.TenantID}); err != nil { return err } } @@ -71,14 +71,14 @@ var migration17 = migrate.Migration{ return err } - cursor.Close(context.TODO()) + cursor.Close(ctx) - cursor, err = db.Collection("devices").Find(context.TODO(), bson.D{}) + cursor, err = db.Collection("devices").Find(ctx, 
bson.D{}) if err != nil { return err } - for cursor.Next(context.TODO()) { + for cursor.Next(ctx) { device := new(models.Device) err = cursor.Decode(&device) if err != nil { @@ -86,19 +86,19 @@ var migration17 = migrate.Migration{ } namespace := Namespace{} - if err := db.Collection("namespaces").FindOne(context.TODO(), bson.M{"tenant_id": device.TenantID}).Decode(&namespace); err != nil { + if err := db.Collection("namespaces").FindOne(ctx, bson.M{"tenant_id": device.TenantID}).Decode(&namespace); err != nil { if err != mongo.ErrNoDocuments { return err } - if _, err := db.Collection("devices").DeleteOne(context.TODO(), bson.M{"uid": device.UID}); err != nil { + if _, err := db.Collection("devices").DeleteOne(ctx, bson.M{"uid": device.UID}); err != nil { return err } - if _, err := db.Collection("sessions").DeleteMany(context.TODO(), bson.M{"device_uid": device.UID}); err != nil { + if _, err := db.Collection("sessions").DeleteMany(ctx, bson.M{"device_uid": device.UID}); err != nil { return err } - if _, err := db.Collection("connected_devices").DeleteMany(context.TODO(), bson.M{"uid": device.UID}); err != nil { + if _, err := db.Collection("connected_devices").DeleteMany(ctx, bson.M{"uid": device.UID}); err != nil { return err } } @@ -107,13 +107,13 @@ var migration17 = migrate.Migration{ return err } - cursor.Close(context.TODO()) + cursor.Close(ctx) - cursor, err = db.Collection("firewall_rules").Find(context.TODO(), bson.D{}) + cursor, err = db.Collection("firewall_rules").Find(ctx, bson.D{}) if err != nil { return err } - for cursor.Next(context.TODO()) { + for cursor.Next(ctx) { rule := new(models.FirewallRule) err := cursor.Decode(&rule) if err != nil { @@ -121,11 +121,11 @@ var migration17 = migrate.Migration{ } namespace := Namespace{} - if err := db.Collection("namespaces").FindOne(context.TODO(), bson.M{"tenant_id": rule.TenantID}).Decode(&namespace); err != nil { + if err := db.Collection("namespaces").FindOne(ctx, bson.M{"tenant_id": 
rule.TenantID}).Decode(&namespace); err != nil { if err != mongo.ErrNoDocuments { return err } - if _, err := db.Collection("firewall_rules").DeleteOne(context.TODO(), bson.M{"tenant_id": rule.TenantID}); err != nil { + if _, err := db.Collection("firewall_rules").DeleteOne(ctx, bson.M{"tenant_id": rule.TenantID}); err != nil { return err } } @@ -134,25 +134,25 @@ var migration17 = migrate.Migration{ return err } - cursor.Close(context.TODO()) + cursor.Close(ctx) - cursor, err = db.Collection("public_keys").Find(context.TODO(), bson.D{}) + cursor, err = db.Collection("public_keys").Find(ctx, bson.D{}) if err != nil { return err } - for cursor.Next(context.TODO()) { + for cursor.Next(ctx) { key := new(models.PublicKey) err := cursor.Decode(&key) if err != nil { return err } namespace := Namespace{} - if err := db.Collection("namespaces").FindOne(context.TODO(), bson.M{"tenant_id": key.TenantID}).Decode(&namespace); err != nil { + if err := db.Collection("namespaces").FindOne(ctx, bson.M{"tenant_id": key.TenantID}).Decode(&namespace); err != nil { if err != mongo.ErrNoDocuments { return err } - if _, err := db.Collection("public_keys").DeleteOne(context.TODO(), bson.M{"tenant_id": key.TenantID}); err != nil { + if _, err := db.Collection("public_keys").DeleteOne(ctx, bson.M{"tenant_id": key.TenantID}); err != nil { return err } } @@ -161,11 +161,11 @@ var migration17 = migrate.Migration{ return err } - cursor.Close(context.TODO()) + cursor.Close(ctx) return err - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 17, @@ -173,5 +173,5 @@ var migration17 = migrate.Migration{ }).Info("Applying migration") return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_17_test.go b/api/store/mongo/migrations/migration_17_test.go index a6c2c0ec762..f3995348e3d 100644 --- 
a/api/store/mongo/migrations/migration_17_test.go +++ b/api/store/mongo/migrations/migration_17_test.go @@ -5,19 +5,16 @@ import ( "testing" "time" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration17(t *testing.T) { - logrus.Info("Testing Migration 17 - Test if the namespaces, devices, session, connected_devices, firewall_rules and public_keys was deleted for users") - - db := dbtest.DBServer{} - defer db.Stop() + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) user := models.User{ UserData: models.UserData{ @@ -25,7 +22,9 @@ func TestMigration17(t *testing.T) { Username: "username", Email: "email", }, - UserPassword: models.NewUserPassword("password"), + Password: models.UserPassword{ + Hash: "2bb80d537b1da3e38bd30361aa855686bde0eacd7162fef6a25fe97bf527a25b", + }, } type NamespaceSettings struct { @@ -37,6 +36,12 @@ func TestMigration17(t *testing.T) { Name string `json:"name,omitempty" bson:"-"` } + type ConnectedDevice struct { + UID string `json:"uid"` + TenantID string `json:"tenant_id" bson:"tenant_id"` + LastSeen time.Time `json:"last_seen" bson:"last_seen"` + } + type Namespace struct { Name string `json:"name" validate:"required,hostname_rfc1123,excludes=."` Owner string `json:"owner"` @@ -65,7 +70,7 @@ func TestMigration17(t *testing.T) { DeviceUID: "1", } - connectedDevice := models.ConnectedDevice{ + connectedDevice := ConnectedDevice{ UID: "1", } @@ -77,46 +82,46 @@ func TestMigration17(t *testing.T) { TenantID: "tenant", } - _, err := db.Client().Database("test").Collection("users").InsertOne(context.TODO(), user) + _, err := c.Database("test").Collection("users").InsertOne(context.TODO(), user) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace) + _, err = 
c.Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("devices").InsertOne(context.TODO(), device) + _, err = c.Database("test").Collection("devices").InsertOne(context.TODO(), device) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("sessions").InsertOne(context.TODO(), session) + _, err = c.Database("test").Collection("sessions").InsertOne(context.TODO(), session) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("connected_devices").InsertOne(context.TODO(), connectedDevice) + _, err = c.Database("test").Collection("connected_devices").InsertOne(context.TODO(), connectedDevice) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("firewall_rules").InsertOne(context.TODO(), firewallRules) + _, err = c.Database("test").Collection("firewall_rules").InsertOne(context.TODO(), firewallRules) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("public_keys").InsertOne(context.TODO(), pk) + _, err = c.Database("test").Collection("public_keys").InsertOne(context.TODO(), pk) assert.NoError(t, err) - migrates := migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:17]...) - err = migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:17]...) 
+ err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - err = db.Client().Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{"tenant_id": namespace.TenantID}).Decode(&namespace) + err = c.Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{"tenant_id": namespace.TenantID}).Decode(&namespace) assert.Error(t, err) - err = db.Client().Database("test").Collection("devices").FindOne(context.TODO(), bson.M{"tenant_id": device.TenantID}).Decode(&device) + err = c.Database("test").Collection("devices").FindOne(context.TODO(), bson.M{"tenant_id": device.TenantID}).Decode(&device) assert.Error(t, err) - err = db.Client().Database("test").Collection("sessions").FindOne(context.TODO(), bson.M{"device_uid": session.DeviceUID}).Decode(&session) + err = c.Database("test").Collection("sessions").FindOne(context.TODO(), bson.M{"device_uid": session.DeviceUID}).Decode(&session) assert.Error(t, err) - err = db.Client().Database("test").Collection("connected_devices").FindOne(context.TODO(), bson.M{"uid": connectedDevice.UID}).Decode(&connectedDevice) + err = c.Database("test").Collection("connected_devices").FindOne(context.TODO(), bson.M{"uid": connectedDevice.UID}).Decode(&connectedDevice) assert.Error(t, err) - err = db.Client().Database("test").Collection("firewall_rules").FindOne(context.TODO(), bson.M{"tenant_id": firewallRules.TenantID}).Decode(&firewallRules) + err = c.Database("test").Collection("firewall_rules").FindOne(context.TODO(), bson.M{"tenant_id": firewallRules.TenantID}).Decode(&firewallRules) assert.Error(t, err) - err = db.Client().Database("test").Collection("public_keys").FindOne(context.TODO(), bson.M{"tenant_id": pk.TenantID}).Decode(&pk) + err = c.Database("test").Collection("public_keys").FindOne(context.TODO(), bson.M{"tenant_id": pk.TenantID}).Decode(&pk) assert.Error(t, err) } diff --git a/api/store/mongo/migrations/migration_18.go b/api/store/mongo/migrations/migration_18.go 
index be53dcf8194..aa6b910c130 100644 --- a/api/store/mongo/migrations/migration_18.go +++ b/api/store/mongo/migrations/migration_18.go @@ -13,22 +13,22 @@ import ( var migration18 = migrate.Migration{ Version: 18, Description: "Set the max_devices value in the namespaces collection to 3 on enterprise", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 18, "action": "Up", }).Info("Applying migration") if envs.IsEnterprise() { - _, err := db.Collection("namespaces").UpdateMany(context.TODO(), bson.M{}, bson.M{"$set": bson.M{"max_devices": 3}}) + _, err := db.Collection("namespaces").UpdateMany(ctx, bson.M{}, bson.M{"$set": bson.M{"max_devices": 3}}) return err } - _, err := db.Collection("namespaces").UpdateMany(context.TODO(), bson.M{}, bson.M{"$set": bson.M{"max_devices": -1}}) + _, err := db.Collection("namespaces").UpdateMany(ctx, bson.M{}, bson.M{"$set": bson.M{"max_devices": -1}}) return err - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 18, @@ -36,5 +36,5 @@ var migration18 = migrate.Migration{ }).Info("Applying migration") return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_18_test.go b/api/store/mongo/migrations/migration_18_test.go index 1fa7b83ffd2..c0eb33520fd 100644 --- a/api/store/mongo/migrations/migration_18_test.go +++ b/api/store/mongo/migrations/migration_18_test.go @@ -4,19 +4,16 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration18(t *testing.T) { - logrus.Info("Testing Migration 18 - Test if 
the max_devices is 3") - - db := dbtest.DBServer{} - defer db.Stop() + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) namespace := models.Namespace{ Name: "name", @@ -26,18 +23,17 @@ func TestMigration18(t *testing.T) { migrations := GenerateMigrations()[:17] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) + err := migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace) + _, err = c.Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace) assert.NoError(t, err) - migrates = migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[17]) - err = migrates.Up(migrate.AllAvailable) - assert.NoError(t, err) + migrates = migrate.NewMigrate(c.Database("test"), GenerateMigrations()[17]) + assert.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) - err = db.Client().Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{"tenant_id": "tenant"}).Decode(&namespace) + err = c.Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{"tenant_id": "tenant"}).Decode(&namespace) assert.NoError(t, err) - assert.Equal(t, namespace.MaxDevices, 3) + assert.Equal(t, 3, namespace.MaxDevices) } diff --git a/api/store/mongo/migrations/migration_19.go b/api/store/mongo/migrations/migration_19.go index abaf3a6d04d..132f518eb41 100644 --- a/api/store/mongo/migrations/migration_19.go +++ b/api/store/mongo/migrations/migration_19.go @@ -13,17 +13,17 @@ import ( var migration19 = migrate.Migration{ Version: 19, Description: "Remove all fingerprint associated with a public keys collection", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ 
"component": "migration", "version": 19, "action": "Up", }).Info("Applying migration") - _, err := db.Collection("public_keys").Indexes().DropOne(context.TODO(), "fingerprint") + _, err := db.Collection("public_keys").Indexes().DropOne(ctx, "fingerprint") return err - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 19, @@ -33,8 +33,8 @@ var migration19 = migrate.Migration{ Keys: bson.D{{"fingerprint", 1}}, Options: options.Index().SetName("fingerprint").SetUnique(true), } - _, err := db.Collection("public_keys").Indexes().CreateOne(context.TODO(), mod) + _, err := db.Collection("public_keys").Indexes().CreateOne(ctx, mod) return err - }, + }), } diff --git a/api/store/mongo/migrations/migration_19_test.go b/api/store/mongo/migrations/migration_19_test.go index 7057fab4bd5..06778a6d020 100644 --- a/api/store/mongo/migrations/migration_19_test.go +++ b/api/store/mongo/migrations/migration_19_test.go @@ -5,21 +5,18 @@ import ( "testing" "time" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration19(t *testing.T) { - logrus.Info("Testing Migration 19 - Test if the fingerprint is removed") + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) - db := dbtest.DBServer{} - defer db.Stop() - - migrates := migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:19]...) - err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:19]...) 
+ err := migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) type PublicKeyFields struct { @@ -42,10 +39,10 @@ func TestMigration19(t *testing.T) { PublicKeyFields: PublicKeyFields{Name: "teste1", Hostname: ".*"}, } - _, err = db.Client().Database("test").Collection("public_keys").InsertOne(context.TODO(), pk) + _, err = c.Database("test").Collection("public_keys").InsertOne(context.TODO(), pk) assert.NoError(t, err) - err = db.Client().Database("test").Collection("public_keys").FindOne(context.TODO(), bson.M{"tenant_id": "tenant"}).Decode(&pk) + err = c.Database("test").Collection("public_keys").FindOne(context.TODO(), bson.M{"tenant_id": "tenant"}).Decode(&pk) assert.NoError(t, err) assert.Equal(t, pk.Fingerprint, "") } diff --git a/api/store/mongo/migrations/migration_1_test.go b/api/store/mongo/migrations/migration_1_test.go index aeb0fd34f83..fe5169db2a8 100644 --- a/api/store/mongo/migrations/migration_1_test.go +++ b/api/store/mongo/migrations/migration_1_test.go @@ -1,21 +1,19 @@ package migrations import ( + "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" ) func TestMigration1(t *testing.T) { - logrus.Info("Testing Migration 1 - Create the database for the system") + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) - db := dbtest.DBServer{} - defer db.Stop() - - migrates := migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:1]...) - err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:1]...) 
+ err := migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) } diff --git a/api/store/mongo/migrations/migration_2.go b/api/store/mongo/migrations/migration_2.go index 58f1d5eccfb..a839311ef1f 100644 --- a/api/store/mongo/migrations/migration_2.go +++ b/api/store/mongo/migrations/migration_2.go @@ -1,6 +1,8 @@ package migrations import ( + "context" + "github.com/sirupsen/logrus" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/mongo" @@ -9,7 +11,7 @@ import ( var migration2 = migrate.Migration{ Version: 2, Description: "Rename the column device to device_uid", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 2, @@ -17,8 +19,8 @@ var migration2 = migrate.Migration{ }).Info("Applying migration") return renameField(db, "sessions", "device", "device_uid") - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 2, @@ -26,5 +28,5 @@ var migration2 = migrate.Migration{ }).Info("Applying migration") return renameField(db, "sessions", "device_uid", "device") - }, + }), } diff --git a/api/store/mongo/migrations/migration_20.go b/api/store/mongo/migrations/migration_20.go index 6a372af7a60..63913017b6c 100644 --- a/api/store/mongo/migrations/migration_20.go +++ b/api/store/mongo/migrations/migration_20.go @@ -14,13 +14,13 @@ import ( var migration20 = migrate.Migration{ Version: 20, Description: "Change the model on db for firewall_rules collection", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 20, "action": "Up", }).Info("Applying migration") - cursor, err := 
db.Collection("firewall_rules").Find(context.TODO(), bson.D{}) + cursor, err := db.Collection("firewall_rules").Find(ctx, bson.D{}) if err != nil { return err } @@ -31,8 +31,8 @@ var migration20 = migrate.Migration{ models.FirewallRuleFields `bson:",inline"` } - defer cursor.Close(context.TODO()) - for cursor.Next(context.TODO()) { + defer cursor.Close(ctx) + for cursor.Next(ctx) { firewall := new(models.FirewallRule) err := cursor.Decode(&firewall) if err != nil { @@ -46,20 +46,19 @@ var migration20 = migrate.Migration{ } if err == nil { - if errDelete := db.Collection("firewall_rules").FindOneAndDelete(context.TODO(), bson.M{"_id": firewall.ID}); errDelete.Err() != nil { + if errDelete := db.Collection("firewall_rules").FindOneAndDelete(ctx, bson.M{"_id": firewall.ID}); errDelete.Err() != nil { continue } - if _, err := db.Collection("firewall_rules").InsertOne(context.TODO(), replacedRule); err != nil { + if _, err := db.Collection("firewall_rules").InsertOne(ctx, replacedRule); err != nil { return err } } } return nil - }, - - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 20, @@ -67,5 +66,5 @@ var migration20 = migrate.Migration{ }).Info("Applying migration") return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_20_test.go b/api/store/mongo/migrations/migration_20_test.go index 6b23e2d37bf..a7efc67e028 100644 --- a/api/store/mongo/migrations/migration_20_test.go +++ b/api/store/mongo/migrations/migration_20_test.go @@ -4,9 +4,7 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" @@ -14,10 +12,9 @@ import ( ) func TestMigration20(t *testing.T) { - logrus.Info("Testing 
Migration 20 - Test if the firewall_rules has change to new one") - - db := dbtest.DBServer{} - defer db.Stop() + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) type firewallRule struct { ID primitive.ObjectID `json:"id" bson:"_id"` @@ -29,14 +26,14 @@ func TestMigration20(t *testing.T) { TenantID: "tenant", } - _, err := db.Client().Database("test").Collection("firewall_rules").InsertOne(context.TODO(), fRule) + _, err := c.Database("test").Collection("firewall_rules").InsertOne(context.TODO(), fRule) assert.NoError(t, err) - migrates := migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[19:20]...) - err = migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[19:20]...) + err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) var migratedFirewallRules *models.FirewallRule - err = db.Client().Database("test").Collection("firewall_rules").FindOne(context.TODO(), bson.M{"tenant_id": "tenant"}).Decode(&migratedFirewallRules) + err = c.Database("test").Collection("firewall_rules").FindOne(context.TODO(), bson.M{"tenant_id": "tenant"}).Decode(&migratedFirewallRules) assert.NoError(t, err) } diff --git a/api/store/mongo/migrations/migration_21.go b/api/store/mongo/migrations/migration_21.go index 3fcf0c0c0fb..5e2060b5544 100644 --- a/api/store/mongo/migrations/migration_21.go +++ b/api/store/mongo/migrations/migration_21.go @@ -13,18 +13,18 @@ import ( var migration21 = migrate.Migration{ Version: 21, Description: "Remove all sessions, recorded_sessions for the devices", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 21, "action": "Up", }).Info("Applying migration") - cursor, err := db.Collection("sessions").Find(context.TODO(), bson.D{}) + cursor, err := db.Collection("sessions").Find(ctx, bson.D{}) if err != nil { 
return err } - for cursor.Next(context.TODO()) { + for cursor.Next(ctx) { session := new(models.Session) err = cursor.Decode(&session) if err != nil { @@ -32,12 +32,12 @@ var migration21 = migrate.Migration{ } device := new(models.Device) - if err := db.Collection("devices").FindOne(context.TODO(), bson.M{"uid": session.DeviceUID}).Decode(&device); err != nil { + if err := db.Collection("devices").FindOne(ctx, bson.M{"uid": session.DeviceUID}).Decode(&device); err != nil { if err != mongo.ErrNoDocuments { return err } - if _, err := db.Collection("sessions").DeleteMany(context.TODO(), bson.M{"device_uid": session.DeviceUID}); err != nil { + if _, err := db.Collection("sessions").DeleteMany(ctx, bson.M{"device_uid": session.DeviceUID}); err != nil { return err } } @@ -47,14 +47,14 @@ var migration21 = migrate.Migration{ return err } - cursor.Close(context.TODO()) + cursor.Close(ctx) - cursor, err = db.Collection("recorded_sessions").Find(context.TODO(), bson.D{}) + cursor, err = db.Collection("recorded_sessions").Find(ctx, bson.D{}) if err != nil { return err } - for cursor.Next(context.TODO()) { + for cursor.Next(ctx) { record := new(models.RecordedSession) err = cursor.Decode(&record) if err != nil { @@ -62,22 +62,22 @@ var migration21 = migrate.Migration{ } namespace := new(models.Namespace) - if err := db.Collection("namespaces").FindOne(context.TODO(), bson.M{"tenant_id": record.TenantID}).Decode(&namespace); err != nil { + if err := db.Collection("namespaces").FindOne(ctx, bson.M{"tenant_id": record.TenantID}).Decode(&namespace); err != nil { if err != mongo.ErrNoDocuments { return err } - if _, err := db.Collection("recorded_sessions").DeleteMany(context.TODO(), bson.M{"tenant_id": record.TenantID}); err != nil { + if _, err := db.Collection("recorded_sessions").DeleteMany(ctx, bson.M{"tenant_id": record.TenantID}); err != nil { return err } } session := new(models.Session) - if err := db.Collection("sessions").FindOne(context.TODO(), bson.M{"uid": 
record.UID}).Decode(&session); err != nil { + if err := db.Collection("sessions").FindOne(ctx, bson.M{"uid": record.UID}).Decode(&session); err != nil { if err != mongo.ErrNoDocuments { return err } - if _, err := db.Collection("recorded_sessions").DeleteMany(context.TODO(), bson.M{"uid": record.UID}); err != nil { + if _, err := db.Collection("recorded_sessions").DeleteMany(ctx, bson.M{"uid": record.UID}); err != nil { return err } } @@ -87,11 +87,11 @@ var migration21 = migrate.Migration{ return err } - cursor.Close(context.TODO()) + cursor.Close(ctx) return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 21, @@ -99,5 +99,5 @@ var migration21 = migrate.Migration{ }).Info("Applying migration") return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_21_test.go b/api/store/mongo/migrations/migration_21_test.go index e6455ba9a6f..28521e43e06 100644 --- a/api/store/mongo/migrations/migration_21_test.go +++ b/api/store/mongo/migrations/migration_21_test.go @@ -4,19 +4,16 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration21(t *testing.T) { - logrus.Info("Testing Migration 21 - Test if the sessions and connected_devices was removed for the devices") - - db := dbtest.DBServer{} - defer db.Stop() + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) device := models.Device{ UID: "1", @@ -30,24 +27,24 @@ func TestMigration21(t *testing.T) { UID: "1", } - _, err := db.Client().Database("test").Collection("devices").InsertOne(context.TODO(), device) + _, err := c.Database("test").Collection("devices").InsertOne(context.TODO(), device) assert.NoError(t, err) 
- _, err = db.Client().Database("test").Collection("recorded_sessions").InsertOne(context.TODO(), recordedSession) + _, err = c.Database("test").Collection("recorded_sessions").InsertOne(context.TODO(), recordedSession) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("sessions").InsertOne(context.TODO(), session) + _, err = c.Database("test").Collection("sessions").InsertOne(context.TODO(), session) assert.NoError(t, err) - migrates := migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:21]...) - err = migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:21]...) + err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) var migratedRecordedSession *models.RecordedSession - err = db.Client().Database("test").Collection("recorded_sessions").FindOne(context.TODO(), bson.M{"tenant_id": "tenant"}).Decode(&migratedRecordedSession) + err = c.Database("test").Collection("recorded_sessions").FindOne(context.TODO(), bson.M{"tenant_id": "tenant"}).Decode(&migratedRecordedSession) assert.Error(t, err) var migratedSession *models.Session - err = db.Client().Database("test").Collection("sessions").FindOne(context.TODO(), bson.M{"tenant_id": "tenant"}).Decode(&migratedSession) + err = c.Database("test").Collection("sessions").FindOne(context.TODO(), bson.M{"tenant_id": "tenant"}).Decode(&migratedSession) assert.Error(t, err) } diff --git a/api/store/mongo/migrations/migration_22.go b/api/store/mongo/migrations/migration_22.go index 2bba7fdec5a..16119e8e609 100644 --- a/api/store/mongo/migrations/migration_22.go +++ b/api/store/mongo/migrations/migration_22.go @@ -15,13 +15,13 @@ import ( var migration22 = migrate.Migration{ Version: 22, Description: "Insert the user on the members group for the namespace", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { 
logrus.WithFields(logrus.Fields{ "component": "migration", "version": 22, "action": "Up", }).Info("Applying migration") - cursor, err := db.Collection("namespaces").Find(context.TODO(), bson.D{}) + cursor, err := db.Collection("namespaces").Find(ctx, bson.D{}) if err != nil { return err } @@ -43,7 +43,7 @@ var migration22 = migrate.Migration{ CreatedAt time.Time `json:"created_at" bson:"created_at"` } - for cursor.Next(context.TODO()) { + for cursor.Next(ctx) { namespace := Namespace{} @@ -58,8 +58,8 @@ var migration22 = migrate.Migration{ if err != nil { return err } - if err := db.Collection("users").FindOne(context.TODO(), bson.M{"_id": objID}).Decode(&user); err != nil { - if _, err := db.Collection("namespaces").UpdateOne(context.TODO(), bson.M{"tenant_id": namespace.TenantID}, bson.M{"$pull": bson.M{"members": memberID}}); err != nil { + if err := db.Collection("users").FindOne(ctx, bson.M{"_id": objID}).Decode(&user); err != nil { + if _, err := db.Collection("namespaces").UpdateOne(ctx, bson.M{"tenant_id": namespace.TenantID}, bson.M{"$pull": bson.M{"members": memberID}}); err != nil { return err } } @@ -67,8 +67,8 @@ var migration22 = migrate.Migration{ } return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 22, @@ -76,5 +76,5 @@ var migration22 = migrate.Migration{ }).Info("Applying migration") return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_22_test.go b/api/store/mongo/migrations/migration_22_test.go index ecb02a3585b..320a170b192 100644 --- a/api/store/mongo/migrations/migration_22_test.go +++ b/api/store/mongo/migrations/migration_22_test.go @@ -5,19 +5,16 @@ import ( "testing" "time" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate 
"github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration22(t *testing.T) { - logrus.Info("Testing Migration 22 - Test if the user was added to membres group for the namespace") - - db := dbtest.DBServer{} - defer db.Stop() + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) user := models.User{ ID: "1", @@ -47,17 +44,17 @@ func TestMigration22(t *testing.T) { Members: []interface{}{"60df59bc65f88d92b974a60f"}, MaxDevices: -1, } - _, err := db.Client().Database("test").Collection("devices").InsertOne(context.TODO(), user) + _, err := c.Database("test").Collection("devices").InsertOne(context.TODO(), user) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), ns) + _, err = c.Database("test").Collection("namespaces").InsertOne(context.TODO(), ns) assert.NoError(t, err) - migrates := migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[21:22]...) - err = migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[21:22]...) 
+ err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) var migratedNamespace *models.Namespace - err = db.Client().Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{"tenant_id": "tenant"}).Decode(&migratedNamespace) + err = c.Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{"tenant_id": "tenant"}).Decode(&migratedNamespace) assert.NoError(t, err) } diff --git a/api/store/mongo/migrations/migration_23.go b/api/store/mongo/migrations/migration_23.go index 9b900e71834..ab401ab77d1 100644 --- a/api/store/mongo/migrations/migration_23.go +++ b/api/store/mongo/migrations/migration_23.go @@ -12,13 +12,13 @@ import ( var migration23 = migrate.Migration{ Version: 23, Description: "change dot in namespace name and hostname to -", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 23, "action": "Up", }).Info("Applying migration") - if _, err := db.Collection("namespaces").UpdateMany(context.TODO(), bson.D{}, []bson.M{ + if _, err := db.Collection("namespaces").UpdateMany(ctx, bson.D{}, []bson.M{ { "$set": bson.M{ "name": bson.M{ @@ -30,7 +30,7 @@ var migration23 = migrate.Migration{ return err } - if _, err := db.Collection("devices").UpdateMany(context.TODO(), bson.D{}, []bson.M{ + if _, err := db.Collection("devices").UpdateMany(ctx, bson.D{}, []bson.M{ { "$set": bson.M{ "name": bson.M{ @@ -43,8 +43,8 @@ var migration23 = migrate.Migration{ } return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 23, @@ -52,5 +52,5 @@ var migration23 = migrate.Migration{ }).Info("Applying migration") return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_23_test.go 
b/api/store/mongo/migrations/migration_23_test.go index a4a9c106ef4..d4c55e95e77 100644 --- a/api/store/mongo/migrations/migration_23_test.go +++ b/api/store/mongo/migrations/migration_23_test.go @@ -4,7 +4,6 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/clock" "github.com/shellhub-io/shellhub/pkg/models" "github.com/stretchr/testify/assert" @@ -14,16 +13,17 @@ import ( ) func TestMigration23(t *testing.T) { - db := dbtest.DBServer{} - defer db.Stop() + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) migrations := GenerateMigrations()[:22] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) + err := migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - version, _, err := migrates.Version() + version, _, err := migrates.Version(context.Background()) assert.NoError(t, err) assert.Equal(t, uint64(22), version) @@ -32,7 +32,7 @@ func TestMigration23(t *testing.T) { Owner: "owner", TenantID: "tenant", } - _, err = db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace) + _, err = c.Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace) assert.NoError(t, err) namespace = models.Namespace{ @@ -40,7 +40,7 @@ func TestMigration23(t *testing.T) { Owner: "owner", TenantID: "tenant2", } - _, err = db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace) + _, err = c.Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace) assert.NoError(t, err) device := models.Device{ @@ -50,7 +50,7 @@ func TestMigration23(t *testing.T) { TenantID: "tenant", LastSeen: clock.Now(), } - _, err = db.Client().Database("test").Collection("devices").InsertOne(context.TODO(), device) + _, err = 
c.Database("test").Collection("devices").InsertOne(context.TODO(), device) assert.NoError(t, err) device = models.Device{ @@ -60,37 +60,37 @@ func TestMigration23(t *testing.T) { TenantID: "tenant2", LastSeen: clock.Now(), } - _, err = db.Client().Database("test").Collection("devices").InsertOne(context.TODO(), device) + _, err = c.Database("test").Collection("devices").InsertOne(context.TODO(), device) assert.NoError(t, err) migration := GenerateMigrations()[22] - migrates = migrate.NewMigrate(db.Client().Database("test"), migration) - err = migrates.Up(migrate.AllAvailable) + migrates = migrate.NewMigrate(c.Database("test"), migration) + err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - version, _, err = migrates.Version() + version, _, err = migrates.Version(context.Background()) assert.NoError(t, err) assert.Equal(t, uint64(23), version) var migratedNamespace *models.Namespace - err = db.Client().Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{"tenant_id": "tenant"}).Decode(&migratedNamespace) + err = c.Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{"tenant_id": "tenant"}).Decode(&migratedNamespace) assert.NoError(t, err) assert.Equal(t, "namespace-test", migratedNamespace.Name) - err = db.Client().Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{"tenant_id": "tenant2"}).Decode(&migratedNamespace) + err = c.Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{"tenant_id": "tenant2"}).Decode(&migratedNamespace) assert.NoError(t, err) assert.Equal(t, "namespacetest", migratedNamespace.Name) var migratedDevice *models.Device - err = db.Client().Database("test").Collection("devices").FindOne(context.TODO(), bson.M{"tenant_id": "tenant"}).Decode(&migratedDevice) + err = c.Database("test").Collection("devices").FindOne(context.TODO(), bson.M{"tenant_id": "tenant"}).Decode(&migratedDevice) assert.NoError(t, err) assert.Equal(t, 
"device-test", migratedDevice.Name) - err = db.Client().Database("test").Collection("devices").FindOne(context.TODO(), bson.M{"tenant_id": "tenant2"}).Decode(&migratedDevice) + err = c.Database("test").Collection("devices").FindOne(context.TODO(), bson.M{"tenant_id": "tenant2"}).Decode(&migratedDevice) assert.NoError(t, err) assert.Equal(t, "devicetest", migratedDevice.Name) - err = db.Client().Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{"name": "name.test"}).Decode(&models.Namespace{}) + err = c.Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{"name": "name.test"}).Decode(&models.Namespace{}) assert.EqualError(t, mongo.ErrNoDocuments, err.Error()) } diff --git a/api/store/mongo/migrations/migration_24.go b/api/store/mongo/migrations/migration_24.go index ba4fab1795f..d3f8e8402d9 100644 --- a/api/store/mongo/migrations/migration_24.go +++ b/api/store/mongo/migrations/migration_24.go @@ -12,13 +12,13 @@ import ( var migration24 = migrate.Migration{ Version: 24, Description: "convert names and emails to lowercase", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 24, "action": "Up", }).Info("Applying migration") - if _, err := db.Collection("users").UpdateMany(context.TODO(), bson.D{}, []bson.M{ + if _, err := db.Collection("users").UpdateMany(ctx, bson.D{}, []bson.M{ { "$set": bson.M{ "username": bson.M{"$toLower": "$username"}, @@ -29,15 +29,15 @@ var migration24 = migrate.Migration{ return err } - _, err := db.Collection("namespaces").UpdateMany(context.TODO(), bson.D{}, []bson.M{ + _, err := db.Collection("namespaces").UpdateMany(ctx, bson.D{}, []bson.M{ { "$set": bson.M{"name": bson.M{"$toLower": "$name"}}, }, }) return err - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { 
logrus.WithFields(logrus.Fields{ "component": "migration", "version": 24, @@ -45,5 +45,5 @@ var migration24 = migrate.Migration{ }).Info("Applying migration") return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_24_test.go b/api/store/mongo/migrations/migration_24_test.go index 10efa76f68f..24148699420 100644 --- a/api/store/mongo/migrations/migration_24_test.go +++ b/api/store/mongo/migrations/migration_24_test.go @@ -4,7 +4,6 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" @@ -13,16 +12,17 @@ import ( ) func TestMigration24(t *testing.T) { - db := dbtest.DBServer{} - defer db.Stop() + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) migrations := GenerateMigrations()[:23] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) 
+ err := migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - version, _, err := migrates.Version() + version, _, err := migrates.Version(context.Background()) assert.NoError(t, err) assert.Equal(t, uint64(23), version) @@ -32,9 +32,11 @@ func TestMigration24(t *testing.T) { Username: "USERNAME", Email: "EMAIL@MAIL.COM", }, - UserPassword: models.NewUserPassword("password"), + Password: models.UserPassword{ + Hash: "2bb80d537b1da3e38bd30361aa855686bde0eacd7162fef6a25fe97bf527a25b", + }, } - _, err = db.Client().Database("test").Collection("users").InsertOne(context.TODO(), user) + _, err = c.Database("test").Collection("users").InsertOne(context.TODO(), user) assert.NoError(t, err) user = models.User{ @@ -43,9 +45,11 @@ func TestMigration24(t *testing.T) { Username: "Username2", Email: "email@MAIL-TEST.com", }, - UserPassword: models.NewUserPassword("password"), + Password: models.UserPassword{ + Hash: "2bb80d537b1da3e38bd30361aa855686bde0eacd7162fef6a25fe97bf527a25b", + }, } - _, err = db.Client().Database("test").Collection("users").InsertOne(context.TODO(), user) + _, err = c.Database("test").Collection("users").InsertOne(context.TODO(), user) assert.NoError(t, err) user = models.User{ @@ -54,9 +58,11 @@ func TestMigration24(t *testing.T) { Username: "username3", Email: "email@e-mail.com", }, - UserPassword: models.NewUserPassword("password"), + Password: models.UserPassword{ + Hash: "2bb80d537b1da3e38bd30361aa855686bde0eacd7162fef6a25fe97bf527a25b", + }, } - _, err = db.Client().Database("test").Collection("users").InsertOne(context.TODO(), user) + _, err = c.Database("test").Collection("users").InsertOne(context.TODO(), user) assert.NoError(t, err) namespace := models.Namespace{ @@ -64,7 +70,7 @@ func TestMigration24(t *testing.T) { Owner: "owner", TenantID: "tenant", } - _, err = db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace) + _, err = 
c.Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace) assert.NoError(t, err) namespace = models.Namespace{ @@ -72,7 +78,7 @@ func TestMigration24(t *testing.T) { Owner: "owner", TenantID: "tenant2", } - _, err = db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace) + _, err = c.Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace) assert.NoError(t, err) namespace = models.Namespace{ @@ -80,48 +86,48 @@ func TestMigration24(t *testing.T) { Owner: "owner", TenantID: "tenant3", } - _, err = db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace) + _, err = c.Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace) assert.NoError(t, err) migration := GenerateMigrations()[23] - migrates = migrate.NewMigrate(db.Client().Database("test"), migration) - err = migrates.Up(migrate.AllAvailable) + migrates = migrate.NewMigrate(c.Database("test"), migration) + err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - version, _, err = migrates.Version() + version, _, err = migrates.Version(context.Background()) assert.NoError(t, err) assert.Equal(t, uint64(24), version) var migratedUser *models.User - err = db.Client().Database("test").Collection("users").FindOne(context.TODO(), bson.M{"name": "name"}).Decode(&migratedUser) + err = c.Database("test").Collection("users").FindOne(context.TODO(), bson.M{"name": "name"}).Decode(&migratedUser) assert.NoError(t, err) assert.Equal(t, "username", migratedUser.Username) assert.Equal(t, "email@mail.com", migratedUser.Email) - err = db.Client().Database("test").Collection("users").FindOne(context.TODO(), bson.M{"name": "name2"}).Decode(&migratedUser) + err = c.Database("test").Collection("users").FindOne(context.TODO(), bson.M{"name": "name2"}).Decode(&migratedUser) assert.NoError(t, err) assert.Equal(t, "username2", migratedUser.Username) assert.Equal(t, 
"email@mail-test.com", migratedUser.Email) - err = db.Client().Database("test").Collection("users").FindOne(context.TODO(), bson.M{"name": "name3"}).Decode(&migratedUser) + err = c.Database("test").Collection("users").FindOne(context.TODO(), bson.M{"name": "name3"}).Decode(&migratedUser) assert.NoError(t, err) assert.Equal(t, "username3", migratedUser.Username) assert.Equal(t, "email@e-mail.com", migratedUser.Email) var migratedNamespace *models.Namespace - err = db.Client().Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{"tenant_id": "tenant"}).Decode(&migratedNamespace) + err = c.Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{"tenant_id": "tenant"}).Decode(&migratedNamespace) assert.NoError(t, err) assert.Equal(t, "name", migratedNamespace.Name) - err = db.Client().Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{"tenant_id": "tenant2"}).Decode(&migratedNamespace) + err = c.Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{"tenant_id": "tenant2"}).Decode(&migratedNamespace) assert.NoError(t, err) assert.Equal(t, "test", migratedNamespace.Name) - err = db.Client().Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{"tenant_id": "tenant3"}).Decode(&migratedNamespace) + err = c.Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{"tenant_id": "tenant3"}).Decode(&migratedNamespace) assert.NoError(t, err) assert.Equal(t, "teste", migratedNamespace.Name) - err = db.Client().Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{"username": "USERNAME"}).Decode(&models.Namespace{}) + err = c.Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{"username": "USERNAME"}).Decode(&models.Namespace{}) assert.EqualError(t, mongo.ErrNoDocuments, err.Error()) } diff --git a/api/store/mongo/migrations/migration_25.go b/api/store/mongo/migrations/migration_25.go index fbc0b479afd..fd5b01a68be 100644 --- 
a/api/store/mongo/migrations/migration_25.go +++ b/api/store/mongo/migrations/migration_25.go @@ -12,7 +12,7 @@ import ( var migration25 = migrate.Migration{ Version: 25, Description: "remove devices with no namespaces related", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 25, @@ -48,11 +48,11 @@ var migration25 = migrate.Migration{ }, } - _, err := db.Collection("devices").Aggregate(context.TODO(), query) + _, err := db.Collection("devices").Aggregate(ctx, query) return err - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 25, @@ -60,5 +60,5 @@ var migration25 = migrate.Migration{ }).Info("Applying migration") return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_25_test.go b/api/store/mongo/migrations/migration_25_test.go index 2966083a0a6..d5f0ac61931 100644 --- a/api/store/mongo/migrations/migration_25_test.go +++ b/api/store/mongo/migrations/migration_25_test.go @@ -5,7 +5,6 @@ import ( "testing" "time" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" @@ -14,16 +13,17 @@ import ( ) func TestMigration25(t *testing.T) { - db := dbtest.DBServer{} - defer db.Stop() + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) migrations := GenerateMigrations()[:24] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) 
+ err := migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - version, _, err := migrates.Version() + version, _, err := migrates.Version(context.Background()) assert.NoError(t, err) assert.Equal(t, uint64(24), version) @@ -32,7 +32,7 @@ func TestMigration25(t *testing.T) { Owner: "owner", TenantID: "tenant", } - _, err = db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace) + _, err = c.Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace) assert.NoError(t, err) device := models.Device{ @@ -42,7 +42,7 @@ func TestMigration25(t *testing.T) { TenantID: "tenant", LastSeen: time.Now(), } - _, err = db.Client().Database("test").Collection("devices").InsertOne(context.TODO(), device) + _, err = c.Database("test").Collection("devices").InsertOne(context.TODO(), device) assert.NoError(t, err) device = models.Device{ @@ -52,7 +52,7 @@ func TestMigration25(t *testing.T) { TenantID: "tenant2", LastSeen: time.Now(), } - _, err = db.Client().Database("test").Collection("devices").InsertOne(context.TODO(), device) + _, err = c.Database("test").Collection("devices").InsertOne(context.TODO(), device) assert.NoError(t, err) device = models.Device{ @@ -62,27 +62,27 @@ func TestMigration25(t *testing.T) { TenantID: "tenant3", LastSeen: time.Now(), } - _, err = db.Client().Database("test").Collection("devices").InsertOne(context.TODO(), device) + _, err = c.Database("test").Collection("devices").InsertOne(context.TODO(), device) assert.NoError(t, err) migration := GenerateMigrations()[24] - migrates = migrate.NewMigrate(db.Client().Database("test"), migration) - err = migrates.Up(migrate.AllAvailable) + migrates = migrate.NewMigrate(c.Database("test"), migration) + err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - version, _, err = migrates.Version() + version, _, err = migrates.Version(context.Background()) assert.NoError(t, err) assert.Equal(t, uint64(25), 
version) var migratedDevice *models.Device - err = db.Client().Database("test").Collection("devices").FindOne(context.TODO(), bson.M{"tenant_id": "tenant"}).Decode(&migratedDevice) + err = c.Database("test").Collection("devices").FindOne(context.TODO(), bson.M{"tenant_id": "tenant"}).Decode(&migratedDevice) assert.NoError(t, err) assert.Equal(t, "device", migratedDevice.Name) - err = db.Client().Database("test").Collection("devices").FindOne(context.TODO(), bson.M{"tenant_id": "tenant2"}).Decode(&models.Namespace{}) + err = c.Database("test").Collection("devices").FindOne(context.TODO(), bson.M{"tenant_id": "tenant2"}).Decode(&models.Namespace{}) assert.EqualError(t, mongo.ErrNoDocuments, err.Error()) - err = db.Client().Database("test").Collection("devices").FindOne(context.TODO(), bson.M{"tenant_id": "tenant3"}).Decode(&models.Namespace{}) + err = c.Database("test").Collection("devices").FindOne(context.TODO(), bson.M{"tenant_id": "tenant3"}).Decode(&models.Namespace{}) assert.EqualError(t, mongo.ErrNoDocuments, err.Error()) } diff --git a/api/store/mongo/migrations/migration_26.go b/api/store/mongo/migrations/migration_26.go index 55c271f7730..fe58d5061ea 100644 --- a/api/store/mongo/migrations/migration_26.go +++ b/api/store/mongo/migrations/migration_26.go @@ -13,7 +13,7 @@ import ( var migration26 = migrate.Migration{ Version: 26, Description: "Create collection used to recover password and activate account", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 26, @@ -23,7 +23,7 @@ var migration26 = migrate.Migration{ Keys: bson.D{{"created_at", 1}}, Options: options.Index().SetName("ttl").SetExpireAfterSeconds(86400), } - _, err := db.Collection("recovery_tokens").Indexes().CreateOne(context.TODO(), indexModel) + _, err := db.Collection("recovery_tokens").Indexes().CreateOne(ctx, indexModel) if err != nil { return err } 
@@ -32,7 +32,7 @@ var migration26 = migrate.Migration{ Keys: bson.D{{"token", 1}}, Options: options.Index().SetName("token").SetUnique(false), } - if _, err := db.Collection("recovery_tokens").Indexes().CreateOne(context.TODO(), indexModel); err != nil { + if _, err := db.Collection("recovery_tokens").Indexes().CreateOne(ctx, indexModel); err != nil { return err } @@ -40,13 +40,13 @@ var migration26 = migrate.Migration{ Keys: bson.D{{"user", 1}}, Options: options.Index().SetName("user").SetUnique(false), } - if _, err := db.Collection("recovery_tokens").Indexes().CreateOne(context.TODO(), indexModel); err != nil { + if _, err := db.Collection("recovery_tokens").Indexes().CreateOne(ctx, indexModel); err != nil { return err } return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 26, @@ -54,5 +54,5 @@ var migration26 = migrate.Migration{ }).Info("Applying migration") return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_26_test.go b/api/store/mongo/migrations/migration_26_test.go index 4833fb7c280..2bb27e89d8b 100644 --- a/api/store/mongo/migrations/migration_26_test.go +++ b/api/store/mongo/migrations/migration_26_test.go @@ -4,7 +4,6 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/clock" "github.com/shellhub-io/shellhub/pkg/models" "github.com/shellhub-io/shellhub/pkg/uuid" @@ -15,16 +14,17 @@ import ( ) func TestMigration26(t *testing.T) { - db := dbtest.DBServer{} - defer db.Stop() + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) migrations := GenerateMigrations()[:26] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) 
+ err := migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - version, _, err := migrates.Version() + version, _, err := migrates.Version(context.Background()) assert.NoError(t, err) assert.Equal(t, uint64(26), version) @@ -33,15 +33,15 @@ func TestMigration26(t *testing.T) { User: "user", CreatedAt: clock.Now(), } - _, err = db.Client().Database("test").Collection("recovery_tokens").InsertOne(context.TODO(), userToken) + _, err = c.Database("test").Collection("recovery_tokens").InsertOne(context.TODO(), userToken) assert.NoError(t, err) var migratedUserToken *models.UserTokenRecover - err = db.Client().Database("test").Collection("recovery_tokens").FindOne(context.TODO(), bson.M{"user": userToken.User}).Decode(&migratedUserToken) + err = c.Database("test").Collection("recovery_tokens").FindOne(context.TODO(), bson.M{"user": userToken.User}).Decode(&migratedUserToken) assert.NoError(t, err) assert.Equal(t, userToken.Token, migratedUserToken.Token) - index := db.Client().Database("test").Collection("recovery_tokens").Indexes() + index := c.Database("test").Collection("recovery_tokens").Indexes() cursor, err := index.List(context.TODO()) assert.NoError(t, err) diff --git a/api/store/mongo/migrations/migration_27.go b/api/store/mongo/migrations/migration_27.go index bb30a5eb212..40bd06c6673 100644 --- a/api/store/mongo/migrations/migration_27.go +++ b/api/store/mongo/migrations/migration_27.go @@ -12,17 +12,17 @@ import ( var migration27 = migrate.Migration{ Version: 27, Description: "Set closed field in the sessions", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 27, "action": "Up", }).Info("Applying migration") - _, err := db.Collection("sessions").UpdateMany(context.TODO(), bson.M{}, bson.M{"$set": bson.M{"closed": true}}) + _, err := db.Collection("sessions").UpdateMany(ctx, bson.M{}, 
bson.M{"$set": bson.M{"closed": true}}) return err - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 27, @@ -30,5 +30,5 @@ var migration27 = migrate.Migration{ }).Info("Applying migration") return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_27_test.go b/api/store/mongo/migrations/migration_27_test.go index 9b8894c2e09..ba16e44e48a 100644 --- a/api/store/mongo/migrations/migration_27_test.go +++ b/api/store/mongo/migrations/migration_27_test.go @@ -4,22 +4,19 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration27(t *testing.T) { - logrus.Info("Testing Migration 27 - Test closed field in the sessions") + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) - db := dbtest.DBServer{} - defer db.Stop() - - migrates := migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:26]...) - err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:26]...) + err := migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) sessionsToBeMigrated := []struct { @@ -41,15 +38,15 @@ func TestMigration27(t *testing.T) { sessions[i] = v } - _, err = db.Client().Database("test").Collection("sessions").InsertMany(context.TODO(), sessions) + _, err = c.Database("test").Collection("sessions").InsertMany(context.TODO(), sessions) assert.NoError(t, err) - migrates = migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:27]...) - err = migrates.Up(migrate.AllAvailable) + migrates = migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:27]...) 
+ err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) migratedSessions := []models.Session{} - cur, err := db.Client().Database("test").Collection("sessions").Find(context.TODO(), bson.D{}) + cur, err := c.Database("test").Collection("sessions").Find(context.TODO(), bson.D{}) assert.NoError(t, err) for cur.Next(context.TODO()) { var ses models.Session diff --git a/api/store/mongo/migrations/migration_28.go b/api/store/mongo/migrations/migration_28.go index 8d14362a84e..0f333a62369 100644 --- a/api/store/mongo/migrations/migration_28.go +++ b/api/store/mongo/migrations/migration_28.go @@ -13,23 +13,23 @@ import ( var migration28 = migrate.Migration{ Version: 28, Description: "add timestamps fields to collections users and devices", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 28, "action": "Up", }).Info("Applying migration") - if _, err := db.Collection("users").UpdateMany(context.TODO(), bson.M{}, bson.M{"$set": bson.M{"created_at": clock.Now()}}); err != nil { + if _, err := db.Collection("users").UpdateMany(ctx, bson.M{}, bson.M{"$set": bson.M{"created_at": clock.Now()}}); err != nil { return err } - if _, err := db.Collection("devices").UpdateMany(context.TODO(), bson.M{}, bson.M{"$set": bson.M{"created_at": clock.Now()}}); err != nil { + if _, err := db.Collection("devices").UpdateMany(ctx, bson.M{}, bson.M{"$set": bson.M{"created_at": clock.Now()}}); err != nil { return err } return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 28, @@ -37,5 +37,5 @@ var migration28 = migrate.Migration{ }).Info("Applying migration") return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_28_test.go 
b/api/store/mongo/migrations/migration_28_test.go index 5bc339eac7e..aaf8b89a284 100644 --- a/api/store/mongo/migrations/migration_28_test.go +++ b/api/store/mongo/migrations/migration_28_test.go @@ -4,19 +4,16 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration28(t *testing.T) { - logrus.Info("Testing Migration 28 - Test whether the collection of users and devices the field created_at was created") - - db := dbtest.DBServer{} - defer db.Stop() + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) user := models.User{ UserData: models.UserData{ @@ -28,29 +25,29 @@ func TestMigration28(t *testing.T) { UID: "1", } - _, err := db.Client().Database("test").Collection("users").InsertOne(context.TODO(), user) + _, err := c.Database("test").Collection("users").InsertOne(context.TODO(), user) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("devices").InsertOne(context.TODO(), device) + _, err = c.Database("test").Collection("devices").InsertOne(context.TODO(), device) assert.NoError(t, err) migrations := GenerateMigrations()[27:28] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err = migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) 
+ err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - version, _, err := migrates.Version() + version, _, err := migrates.Version(context.Background()) assert.NoError(t, err) assert.Equal(t, uint64(28), version) var migratedUser *models.User - err = db.Client().Database("test").Collection("users").FindOne(context.TODO(), bson.M{"name": user.Name}).Decode(&migratedUser) + err = c.Database("test").Collection("users").FindOne(context.TODO(), bson.M{"name": user.Name}).Decode(&migratedUser) assert.NoError(t, err) assert.NotNil(t, migratedUser.CreatedAt) var migratedDevice *models.Device - err = db.Client().Database("test").Collection("devices").FindOne(context.TODO(), bson.M{"uid": device.UID}).Decode(&migratedDevice) + err = c.Database("test").Collection("devices").FindOne(context.TODO(), bson.M{"uid": device.UID}).Decode(&migratedDevice) assert.NoError(t, err) assert.NotNil(t, migratedDevice.CreatedAt) } diff --git a/api/store/mongo/migrations/migration_29.go b/api/store/mongo/migrations/migration_29.go index 19f84f07a0a..4b7c5028883 100644 --- a/api/store/mongo/migrations/migration_29.go +++ b/api/store/mongo/migrations/migration_29.go @@ -12,19 +12,19 @@ import ( var migration29 = migrate.Migration{ Version: 29, Description: "add last_login field to collection users", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 29, "action": "Up", }).Info("Applying migration") - if _, err := db.Collection("users").UpdateMany(context.TODO(), bson.M{}, bson.M{"$set": bson.M{"last_login": nil}}); err != nil { + if _, err := db.Collection("users").UpdateMany(ctx, bson.M{}, bson.M{"$set": bson.M{"last_login": nil}}); err != nil { return err } return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { 
logrus.WithFields(logrus.Fields{ "component": "migration", "version": 29, @@ -32,5 +32,5 @@ var migration29 = migrate.Migration{ }).Info("Applying migration") return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_29_test.go b/api/store/mongo/migrations/migration_29_test.go index 6c4a486b1c9..12309f4ae95 100644 --- a/api/store/mongo/migrations/migration_29_test.go +++ b/api/store/mongo/migrations/migration_29_test.go @@ -4,9 +4,7 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" @@ -14,10 +12,9 @@ import ( ) func TestMigration29(t *testing.T) { - logrus.Info("Testing Migration 29 - Test whether the collection of users the field last_login was created") - - db := dbtest.DBServer{} - defer db.Stop() + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) user := models.User{ UserData: models.UserData{ @@ -25,24 +22,24 @@ func TestMigration29(t *testing.T) { }, } - _, err := db.Client().Database("test").Collection("users").InsertOne(context.TODO(), user) + _, err := c.Database("test").Collection("users").InsertOne(context.TODO(), user) assert.NoError(t, err) migrations := GenerateMigrations()[:29] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err = migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) 
+ err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - version, _, err := migrates.Version() + version, _, err := migrates.Version(context.Background()) assert.NoError(t, err) assert.Equal(t, uint64(29), version) var migratedUser *models.User - err = db.Client().Database("test").Collection("users").FindOne(context.TODO(), bson.M{"name": user.Name}).Decode(&migratedUser) + err = c.Database("test").Collection("users").FindOne(context.TODO(), bson.M{"name": user.Name}).Decode(&migratedUser) assert.NoError(t, err) - index := db.Client().Database("test").Collection("users").Indexes() + index := c.Database("test").Collection("users").Indexes() cursor, err := index.List(context.TODO()) assert.NoError(t, err) diff --git a/api/store/mongo/migrations/migration_2_test.go b/api/store/mongo/migrations/migration_2_test.go index 2245bd53942..baacffef39f 100644 --- a/api/store/mongo/migrations/migration_2_test.go +++ b/api/store/mongo/migrations/migration_2_test.go @@ -4,19 +4,16 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration2(t *testing.T) { - logrus.Info("Testing Migration 2 - Test if the column device was renamed to device_uid") - - db := dbtest.DBServer{} - defer db.Stop() + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) type Session struct { UID string `json:"uid"` @@ -33,21 +30,21 @@ func TestMigration2(t *testing.T) { IPAddress: "0.0.0.0", } - _, err := db.Client().Database("test").Collection("sessions").InsertOne(context.TODO(), session) + _, err := c.Database("test").Collection("sessions").InsertOne(context.TODO(), session) assert.NoError(t, err) var afterMigrationSession *Session - err = db.Client().Database("test").Collection("sessions").FindOne(context.TODO(), bson.M{"device": 
"deviceUID"}).Decode(&afterMigrationSession) + err = c.Database("test").Collection("sessions").FindOne(context.TODO(), bson.M{"device": "deviceUID"}).Decode(&afterMigrationSession) assert.NoError(t, err) - migrates := migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:2]...) - err = migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:2]...) + err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("sessions").InsertOne(context.TODO(), session) + _, err = c.Database("test").Collection("sessions").InsertOne(context.TODO(), session) assert.NoError(t, err) var migratedSession *models.Session - err = db.Client().Database("test").Collection("sessions").FindOne(context.TODO(), bson.M{"device_uid": "deviceUID"}).Decode(&migratedSession) + err = c.Database("test").Collection("sessions").FindOne(context.TODO(), bson.M{"device_uid": "deviceUID"}).Decode(&migratedSession) assert.NoError(t, err) } diff --git a/api/store/mongo/migrations/migration_3.go b/api/store/mongo/migrations/migration_3.go index 1a1d1c6deef..819e87541b3 100644 --- a/api/store/mongo/migrations/migration_3.go +++ b/api/store/mongo/migrations/migration_3.go @@ -1,6 +1,8 @@ package migrations import ( + "context" + "github.com/sirupsen/logrus" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/mongo" @@ -9,7 +11,7 @@ import ( var migration3 = migrate.Migration{ Version: 3, Description: "Rename the column attributes to info", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 3, @@ -17,8 +19,8 @@ var migration3 = migrate.Migration{ }).Info("Applying migration") return renameField(db, "devices", "attributes", "info") - }, - Down: func(db *mongo.Database) error { + }), + Down: 
migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 3, @@ -26,5 +28,5 @@ var migration3 = migrate.Migration{ }).Info("Applying migration") return renameField(db, "devices", "info", "attributes") - }, + }), } diff --git a/api/store/mongo/migrations/migration_30.go b/api/store/mongo/migrations/migration_30.go index 8f3372c531c..bcd5f6f888a 100644 --- a/api/store/mongo/migrations/migration_30.go +++ b/api/store/mongo/migrations/migration_30.go @@ -12,19 +12,19 @@ import ( var migration30 = migrate.Migration{ Version: 30, Description: "add remote_addr field to collection devices", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 30, "action": "Up", }).Info("Applying migration") - if _, err := db.Collection("devices").UpdateMany(context.TODO(), bson.M{}, bson.M{"$set": bson.M{"remote_addr": ""}}); err != nil { + if _, err := db.Collection("devices").UpdateMany(ctx, bson.M{}, bson.M{"$set": bson.M{"remote_addr": ""}}); err != nil { return err } return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 30, @@ -32,5 +32,5 @@ var migration30 = migrate.Migration{ }).Info("Applying migration") return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_30_test.go b/api/store/mongo/migrations/migration_30_test.go index fc5b6248dc6..3a538788d12 100644 --- a/api/store/mongo/migrations/migration_30_test.go +++ b/api/store/mongo/migrations/migration_30_test.go @@ -4,39 +4,36 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate 
"github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration30(t *testing.T) { - logrus.Info("Testing Migration 30 - Test whether the collection of devices the field remote_addr was created") - - db := dbtest.DBServer{} - defer db.Stop() + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) device := models.Device{ UID: "1", } - _, err := db.Client().Database("test").Collection("devices").InsertOne(context.TODO(), device) + _, err := c.Database("test").Collection("devices").InsertOne(context.TODO(), device) assert.NoError(t, err) migrations := GenerateMigrations()[29:30] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err = migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) + err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - version, _, err := migrates.Version() + version, _, err := migrates.Version(context.Background()) assert.NoError(t, err) assert.Equal(t, uint64(30), version) var migratedDevice *models.Device - err = db.Client().Database("test").Collection("devices").FindOne(context.TODO(), bson.M{"uid": device.UID}).Decode(&migratedDevice) + err = c.Database("test").Collection("devices").FindOne(context.TODO(), bson.M{"uid": device.UID}).Decode(&migratedDevice) assert.NoError(t, err) assert.Equal(t, migratedDevice.RemoteAddr, "") } diff --git a/api/store/mongo/migrations/migration_31.go b/api/store/mongo/migrations/migration_31.go index 87234ca9d6a..f3f8c3d23e1 100644 --- a/api/store/mongo/migrations/migration_31.go +++ b/api/store/mongo/migrations/migration_31.go @@ -13,19 +13,19 @@ import ( var migration31 = migrate.Migration{ Version: 31, Description: "add last_login field to collection namespaces", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 31, "action": 
"Up", }).Info("Applying migration") - if _, err := db.Collection("namespaces").UpdateMany(context.TODO(), bson.M{}, bson.M{"$set": bson.M{"created_at": clock.Now()}}); err != nil { + if _, err := db.Collection("namespaces").UpdateMany(ctx, bson.M{}, bson.M{"$set": bson.M{"created_at": clock.Now()}}); err != nil { return err } return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 31, @@ -33,5 +33,5 @@ var migration31 = migrate.Migration{ }).Info("Applying migration") return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_31_test.go b/api/store/mongo/migrations/migration_31_test.go index 80543085e24..e9157dba0e2 100644 --- a/api/store/mongo/migrations/migration_31_test.go +++ b/api/store/mongo/migrations/migration_31_test.go @@ -4,39 +4,36 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration31(t *testing.T) { - logrus.Info("Testing Migration 31 - Test whether the collection of namespaces the field created_at was created") - - db := dbtest.DBServer{} - defer db.Stop() + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) namespace := models.Namespace{ Name: "Test", } - _, err := db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace) + _, err := c.Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace) assert.NoError(t, err) migrations := GenerateMigrations()[30:31] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err = migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) 
+ err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - version, _, err := migrates.Version() + version, _, err := migrates.Version(context.Background()) assert.NoError(t, err) assert.Equal(t, uint64(31), version) var migratedNamespace *models.Namespace - err = db.Client().Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{"name": namespace.Name}).Decode(&migratedNamespace) + err = c.Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{"name": namespace.Name}).Decode(&migratedNamespace) assert.NoError(t, err) assert.NotNil(t, migratedNamespace.CreatedAt) } diff --git a/api/store/mongo/migrations/migration_32.go b/api/store/mongo/migrations/migration_32.go index 411a16a4740..7d97ab1f0b9 100644 --- a/api/store/mongo/migrations/migration_32.go +++ b/api/store/mongo/migrations/migration_32.go @@ -12,19 +12,19 @@ import ( var migration32 = migrate.Migration{ Version: 32, Description: "add authenticated field to collection users", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 32, "action": "Up", }).Info("Applying migration") - if _, err := db.Collection("users").UpdateMany(context.TODO(), bson.M{}, bson.M{"$set": bson.M{"authenticated": true}}); err != nil { + if _, err := db.Collection("users").UpdateMany(ctx, bson.M{}, bson.M{"$set": bson.M{"authenticated": true}}); err != nil { return err } return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 32, @@ -32,5 +32,5 @@ var migration32 = migrate.Migration{ }).Info("Applying migration") return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_32_test.go b/api/store/mongo/migrations/migration_32_test.go index 1ddabca9777..1fe63c9db05 
100644 --- a/api/store/mongo/migrations/migration_32_test.go +++ b/api/store/mongo/migrations/migration_32_test.go @@ -4,24 +4,23 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" - "go.mongodb.org/mongo-driver/bson" ) func TestMigration32(t *testing.T) { - db := dbtest.DBServer{} - defer db.Stop() + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) migrations := GenerateMigrations()[:31] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) + err := migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - version, _, err := migrates.Version() + version, _, err := migrates.Version(context.Background()) assert.NoError(t, err) assert.Equal(t, uint64(31), version) @@ -31,23 +30,20 @@ func TestMigration32(t *testing.T) { Username: "username", Email: "email@mail.com", }, - UserPassword: models.NewUserPassword("password"), + Password: models.UserPassword{ + Hash: "2bb80d537b1da3e38bd30361aa855686bde0eacd7162fef6a25fe97bf527a25b", + }, } - _, err = db.Client().Database("test").Collection("users").InsertOne(context.TODO(), user) + _, err = c.Database("test").Collection("users").InsertOne(context.TODO(), user) assert.NoError(t, err) migration := GenerateMigrations()[31] - migrates = migrate.NewMigrate(db.Client().Database("test"), migration) - err = migrates.Up(migrate.AllAvailable) + migrates = migrate.NewMigrate(c.Database("test"), migration) + err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - version, _, err = migrates.Version() + version, _, err = migrates.Version(context.Background()) assert.NoError(t, err) assert.Equal(t, uint64(32), version) - - var migratedUser *models.User - err = 
db.Client().Database("test").Collection("users").FindOne(context.TODO(), bson.M{"name": "name"}).Decode(&migratedUser) - assert.NoError(t, err) - assert.Equal(t, false, migratedUser.Confirmed) } diff --git a/api/store/mongo/migrations/migration_33.go b/api/store/mongo/migrations/migration_33.go index ac8771945b4..802232add8b 100644 --- a/api/store/mongo/migrations/migration_33.go +++ b/api/store/mongo/migrations/migration_33.go @@ -13,7 +13,7 @@ import ( var migration33 = migrate.Migration{ Version: 33, Description: "add tags field to collection devices", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 33, @@ -23,18 +23,18 @@ var migration33 = migrate.Migration{ Keys: bson.D{{"tags", 1}}, Options: options.Index().SetName("tags").SetUnique(false), } - _, err := db.Collection("devices").Indexes().CreateOne(context.TODO(), mod) + _, err := db.Collection("devices").Indexes().CreateOne(ctx, mod) if err != nil { return err } - if _, err := db.Collection("devices").UpdateMany(context.TODO(), bson.M{}, bson.M{"$set": bson.M{"tags": []string{}}}); err != nil { + if _, err := db.Collection("devices").UpdateMany(ctx, bson.M{}, bson.M{"$set": bson.M{"tags": []string{}}}); err != nil { return err } return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 33, @@ -42,5 +42,5 @@ var migration33 = migrate.Migration{ }).Info("Applying migration") return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_33_test.go b/api/store/mongo/migrations/migration_33_test.go index b0de02de29c..9af00338cc8 100644 --- a/api/store/mongo/migrations/migration_33_test.go +++ b/api/store/mongo/migrations/migration_33_test.go @@ -4,7 +4,6 @@ import ( "context" "testing" - 
"github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" @@ -12,16 +11,17 @@ import ( ) func TestMigration33(t *testing.T) { - db := dbtest.DBServer{} - defer db.Stop() + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) migrations := GenerateMigrations()[:32] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) + err := migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - version, _, err := migrates.Version() + version, _, err := migrates.Version(context.Background()) assert.NoError(t, err) assert.Equal(t, uint64(32), version) @@ -29,21 +29,21 @@ func TestMigration33(t *testing.T) { UID: "1", TenantID: "tenant", } - _, err = db.Client().Database("test").Collection("devices").InsertOne(context.TODO(), &device) + _, err = c.Database("test").Collection("devices").InsertOne(context.TODO(), &device) assert.NoError(t, err) migration := GenerateMigrations()[32:33] - migrates = migrate.NewMigrate(db.Client().Database("test"), migration...) - err = migrates.Up(migrate.AllAvailable) + migrates = migrate.NewMigrate(c.Database("test"), migration...) 
+ err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - version, _, err = migrates.Version() + version, _, err = migrates.Version(context.Background()) assert.NoError(t, err) assert.Equal(t, uint64(33), version) var migratedDevice *models.Device - err = db.Client().Database("test").Collection("devices").FindOne(context.TODO(), bson.M{"uid": device.UID}).Decode(&migratedDevice) + err = c.Database("test").Collection("devices").FindOne(context.TODO(), bson.M{"uid": device.UID}).Decode(&migratedDevice) assert.NoError(t, err) assert.Equal(t, 0, len(migratedDevice.Tags)) } diff --git a/api/store/mongo/migrations/migration_34.go b/api/store/mongo/migrations/migration_34.go index d61e54c386f..081dc12de46 100644 --- a/api/store/mongo/migrations/migration_34.go +++ b/api/store/mongo/migrations/migration_34.go @@ -12,7 +12,7 @@ import ( var migration34 = migrate.Migration{ Version: 34, Description: "create online index in devices collection", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 34, @@ -23,19 +23,19 @@ var migration34 = migrate.Migration{ Keys: bson.D{{"online", 1}}, } - _, err := db.Collection("devices").Indexes().CreateOne(context.TODO(), indexModel) + _, err := db.Collection("devices").Indexes().CreateOne(ctx, indexModel) return err - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 34, "action": "Down", }).Info("Applying migration") - _, err := db.Collection("devices").Indexes().DropOne(context.TODO(), "online") + _, err := db.Collection("devices").Indexes().DropOne(ctx, "online") return err - }, + }), } diff --git a/api/store/mongo/migrations/migration_34_test.go b/api/store/mongo/migrations/migration_34_test.go index 
733ff1e5941..5ecf6ba45a3 100644 --- a/api/store/mongo/migrations/migration_34_test.go +++ b/api/store/mongo/migrations/migration_34_test.go @@ -4,7 +4,6 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" @@ -12,31 +11,32 @@ import ( ) func TestMigration34(t *testing.T) { - db := dbtest.DBServer{} - defer db.Stop() + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) migrations := GenerateMigrations()[:33] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) + err := migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - version, _, err := migrates.Version() + version, _, err := migrates.Version(context.Background()) assert.NoError(t, err) assert.Equal(t, uint64(33), version) migrations = GenerateMigrations()[:34] - migrates = migrate.NewMigrate(db.Client().Database("test"), migrations...) - err = migrates.Up(migrate.AllAvailable) + migrates = migrate.NewMigrate(c.Database("test"), migrations...) 
+ err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - version, _, err = migrates.Version() + version, _, err = migrates.Version(context.Background()) assert.NoError(t, err) assert.Equal(t, uint64(34), version) - cursor, err := db.Client().Database("test").Collection("devices").Indexes().List(context.TODO()) + cursor, err := c.Database("test").Collection("devices").Indexes().List(context.TODO()) assert.NoError(t, err) var results []bson.M diff --git a/api/store/mongo/migrations/migration_35.go b/api/store/mongo/migrations/migration_35.go index 933a2769003..7c47f6abb3c 100644 --- a/api/store/mongo/migrations/migration_35.go +++ b/api/store/mongo/migrations/migration_35.go @@ -1,6 +1,8 @@ package migrations import ( + "context" + "github.com/sirupsen/logrus" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/mongo" @@ -9,7 +11,7 @@ import ( var migration35 = migrate.Migration{ Version: 35, Description: "Rename the column authenticated to confirmed", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 35, @@ -17,8 +19,8 @@ var migration35 = migrate.Migration{ }).Info("Applying migration") return renameField(db, "users", "authenticated", "confirmed") - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 35, @@ -26,5 +28,5 @@ var migration35 = migrate.Migration{ }).Info("Applying migration") return renameField(db, "users", "confirmed", "authenticated") - }, + }), } diff --git a/api/store/mongo/migrations/migration_35_test.go b/api/store/mongo/migrations/migration_35_test.go index a4915f19847..658b707f350 100644 --- a/api/store/mongo/migrations/migration_35_test.go +++ b/api/store/mongo/migrations/migration_35_test.go @@ -4,19 +4,16 
@@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration35(t *testing.T) { - logrus.Info("Testing Migration 35 - Test if the column authenticated was renamed to confirmed") - - db := dbtest.DBServer{} - defer db.Stop() + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) type User struct { ID string `json:"id,omitempty" bson:"_id,omitempty"` @@ -36,18 +33,18 @@ func TestMigration35(t *testing.T) { }, } - _, err := db.Client().Database("test").Collection("users").InsertOne(context.TODO(), user) + _, err := c.Database("test").Collection("users").InsertOne(context.TODO(), user) assert.NoError(t, err) var afterMigrationUser *User - err = db.Client().Database("test").Collection("users").FindOne(context.TODO(), bson.M{"username": "username"}).Decode(&afterMigrationUser) + err = c.Database("test").Collection("users").FindOne(context.TODO(), bson.M{"username": "username"}).Decode(&afterMigrationUser) assert.NoError(t, err) - migrates := migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[34:35]...) - err = migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[34:35]...) 
+ err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) var migratedUser *models.User - err = db.Client().Database("test").Collection("users").FindOne(context.TODO(), bson.M{"username": "username"}).Decode(&migratedUser) + err = c.Database("test").Collection("users").FindOne(context.TODO(), bson.M{"username": "username"}).Decode(&migratedUser) assert.NoError(t, err) } diff --git a/api/store/mongo/migrations/migration_36.go b/api/store/mongo/migrations/migration_36.go index ce02a94e90a..2f07cbb781f 100644 --- a/api/store/mongo/migrations/migration_36.go +++ b/api/store/mongo/migrations/migration_36.go @@ -13,7 +13,7 @@ import ( var migration36 = migrate.Migration{ Version: 36, Description: "update max_devices to 3", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 36, @@ -21,14 +21,14 @@ var migration36 = migrate.Migration{ }).Info("Applying migration") if envs.IsCloud() { - if _, err := db.Collection("namespaces").UpdateMany(context.TODO(), bson.M{"billing": nil}, bson.M{"$set": bson.M{"max_devices": 3}}); err != nil { + if _, err := db.Collection("namespaces").UpdateMany(ctx, bson.M{"billing": nil}, bson.M{"$set": bson.M{"max_devices": 3}}); err != nil { return err } } return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 36, @@ -36,5 +36,5 @@ var migration36 = migrate.Migration{ }).Info("Applying migration") return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_36_test.go b/api/store/mongo/migrations/migration_36_test.go index 00b72c8eef5..2f87a0e5868 100644 --- a/api/store/mongo/migrations/migration_36_test.go +++ b/api/store/mongo/migrations/migration_36_test.go @@ -4,27 +4,25 @@ import ( "context" "testing" - 
"github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/envs" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration36(t *testing.T) { - logrus.Info("Testing Migration 36 - Test namespace update max_devices in Cloud") - db := dbtest.DBServer{} - defer db.Stop() + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) migrations := GenerateMigrations()[:35] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) + err := migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - version, _, err := migrates.Version() + version, _, err := migrates.Version(context.Background()) assert.NoError(t, err) assert.Equal(t, uint64(35), version) @@ -75,18 +73,18 @@ func TestMigration36(t *testing.T) { namespaces[i] = v.toBeMigrated } - _, err = db.Client().Database("test").Collection("namespaces").InsertMany(context.TODO(), namespaces) + _, err = c.Database("test").Collection("namespaces").InsertMany(context.TODO(), namespaces) assert.NoError(t, err) - migrates = migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[35]) - err = migrates.Up(migrate.AllAvailable) + migrates = migrate.NewMigrate(c.Database("test"), GenerateMigrations()[35]) + err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - version, _, err = migrates.Version() + version, _, err = migrates.Version(context.Background()) assert.NoError(t, err) assert.Equal(t, uint64(36), version) - cur, err := db.Client().Database("test").Collection("namespaces").Find(context.TODO(), bson.D{}) + cur, err := c.Database("test").Collection("namespaces").Find(context.TODO(), bson.D{}) assert.NoError(t, err) index := 0 diff --git 
a/api/store/mongo/migrations/migration_37.go b/api/store/mongo/migrations/migration_37.go index 7f85311d188..7fa649e8f81 100644 --- a/api/store/mongo/migrations/migration_37.go +++ b/api/store/mongo/migrations/migration_37.go @@ -4,7 +4,7 @@ import ( "context" "time" - "github.com/shellhub-io/shellhub/api/pkg/guard" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" "github.com/shellhub-io/shellhub/pkg/models" "github.com/sirupsen/logrus" migrate "github.com/xakep666/mongo-migrate" @@ -15,13 +15,13 @@ import ( var migration37 = migrate.Migration{ Version: 37, Description: "Change member's role from array of ID to a list of members' object", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 37, "action": "Up", }).Info("Applying migration") - cursor, err := db.Collection("namespaces").Find(context.TODO(), bson.D{}) + cursor, err := db.Collection("namespaces").Find(ctx, bson.D{}) if err != nil { return err } @@ -44,7 +44,7 @@ var migration37 = migrate.Migration{ Billing interface{} `json:"billing" bson:"billing,omitempty"` } - for cursor.Next(context.TODO()) { + for cursor.Next(ctx) { namespace := new(Namespace) err = cursor.Decode(&namespace) if err != nil { @@ -59,21 +59,21 @@ var migration37 = migrate.Migration{ if owner != member { m := models.Member{ ID: member.(string), - Role: guard.RoleObserver, + Role: authorizer.RoleObserver, } memberList = append(memberList, m) } else if owner == member { m := models.Member{ ID: member.(string), - Role: guard.RoleOwner, + Role: authorizer.RoleOwner, } memberList = append(memberList, m) } } - if _, err := db.Collection("namespaces").UpdateOne(context.TODO(), bson.M{"tenant_id": namespace.TenantID}, bson.M{"$set": bson.M{"members": memberList}}); err != nil { + if _, err := db.Collection("namespaces").UpdateOne(ctx, bson.M{"tenant_id": namespace.TenantID}, bson.M{"$set": 
bson.M{"members": memberList}}); err != nil { return err } } @@ -82,22 +82,22 @@ var migration37 = migrate.Migration{ return err } - cursor.Close(context.TODO()) + cursor.Close(ctx) return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 37, "action": "Down", }).Info("Applying migration") - cursor, err := db.Collection("namespaces").Find(context.TODO(), bson.D{}) + cursor, err := db.Collection("namespaces").Find(ctx, bson.D{}) if err != nil { return err } - for cursor.Next(context.TODO()) { + for cursor.Next(ctx) { namespace := new(models.Namespace) err = cursor.Decode(&namespace) if err != nil { @@ -109,7 +109,7 @@ var migration37 = migrate.Migration{ membersList = append(membersList, member.ID) } - if _, err := db.Collection("namespaces").UpdateOne(context.TODO(), bson.M{"tenant_id": namespace.TenantID}, bson.M{"$set": bson.M{"members": membersList}}); err != nil { + if _, err := db.Collection("namespaces").UpdateOne(ctx, bson.M{"tenant_id": namespace.TenantID}, bson.M{"$set": bson.M{"members": membersList}}); err != nil { return err } } @@ -118,8 +118,8 @@ var migration37 = migrate.Migration{ return err } - cursor.Close(context.TODO()) + cursor.Close(ctx) return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_37_test.go b/api/store/mongo/migrations/migration_37_test.go index 1312dd055d2..2a71b1a274a 100644 --- a/api/store/mongo/migrations/migration_37_test.go +++ b/api/store/mongo/migrations/migration_37_test.go @@ -5,20 +5,17 @@ import ( "testing" "time" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" - "github.com/shellhub-io/shellhub/api/pkg/guard" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" 
"go.mongodb.org/mongo-driver/bson" ) func TestMigration37(t *testing.T) { - logrus.Info("Testing Migration 37") - - db := dbtest.DBServer{} - defer db.Stop() + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) user := models.User{ ID: "60df59bc65f88d92b974a60f", @@ -50,34 +47,34 @@ func TestMigration37(t *testing.T) { } migrations := GenerateMigrations()[36:37] - _, err := db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), ns) + _, err := c.Database("test").Collection("namespaces").InsertOne(context.TODO(), ns) assert.NoError(t, err) - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err = migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) + err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) migratedNamespace := &models.Namespace{} - err = db.Client().Database("test").Collection("namespaces").FindOne(context.TODO(), bson.D{{"tenant_id", "tenant"}}).Decode(migratedNamespace) + err = c.Database("test").Collection("namespaces").FindOne(context.TODO(), bson.D{{"tenant_id", "tenant"}}).Decode(migratedNamespace) assert.NoError(t, err) - assert.Equal(t, []models.Member{{ID: user.ID, Role: guard.RoleOwner}}, migratedNamespace.Members) + assert.Equal(t, []models.Member{{ID: user.ID, Role: authorizer.RoleOwner}}, migratedNamespace.Members) namespace := models.Namespace{ Name: "userspace", Owner: user.ID, TenantID: "tenant", - Members: []models.Member{{ID: user.ID, Role: guard.RoleOwner}}, + Members: []models.Member{{ID: user.ID, Role: authorizer.RoleOwner}}, Devices: -1, } - _, err = db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace) + _, err = c.Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace) assert.NoError(t, err) - migrates = migrate.NewMigrate(db.Client().Database("test"), migrations...) 
- err = migrates.Down(migrate.AllAvailable) + migrates = migrate.NewMigrate(c.Database("test"), migrations...) + err = migrates.Down(context.Background(), migrate.AllAvailable) assert.NoError(t, err) migratedNamespaceDown := &Namespace{} - err = db.Client().Database("test").Collection("namespaces").FindOne(context.TODO(), bson.D{{"tenant_id", namespace.TenantID}}).Decode(migratedNamespaceDown) + err = c.Database("test").Collection("namespaces").FindOne(context.TODO(), bson.D{{"tenant_id", namespace.TenantID}}).Decode(migratedNamespaceDown) assert.NoError(t, err) assert.Equal(t, []interface{}{user.ID}, migratedNamespaceDown.Members) } diff --git a/api/store/mongo/migrations/migration_38.go b/api/store/mongo/migrations/migration_38.go index 9a781a828ee..a2941020b46 100644 --- a/api/store/mongo/migrations/migration_38.go +++ b/api/store/mongo/migrations/migration_38.go @@ -13,14 +13,14 @@ import ( var migration38 = migrate.Migration{ Version: 38, Description: "Set last_login to created_at, when created_at is a zero value", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 38, "action": "Up", }).Info("Applying migration") zeroTime := time.Time{}.UTC() - _, err := db.Collection("users").Aggregate(context.TODO(), + _, err := db.Collection("users").Aggregate(ctx, mongo.Pipeline{ { {"$match", bson.D{ @@ -46,8 +46,8 @@ var migration38 = migrate.Migration{ } return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 38, @@ -55,5 +55,5 @@ var migration38 = migrate.Migration{ }).Info("Applying migration") return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_38_test.go b/api/store/mongo/migrations/migration_38_test.go index 8ca88ea2794..28a16c5f798 100644 --- 
a/api/store/mongo/migrations/migration_38_test.go +++ b/api/store/mongo/migrations/migration_38_test.go @@ -5,19 +5,16 @@ import ( "testing" "time" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration38(t *testing.T) { - logrus.Info("Testing Migration 38") - - db := dbtest.DBServer{} - defer db.Stop() + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) type Expected struct { CreatedAt string @@ -34,40 +31,40 @@ func TestMigration38(t *testing.T) { } userNoCreatedAt := models.User{ - ID: "userNoCreatedID", - Namespaces: 0, - Confirmed: false, - CreatedAt: timeZero, - LastLogin: timeNow, + ID: "userNoCreatedID", + CreatedAt: timeZero, + LastLogin: timeNow, UserData: models.UserData{ Name: "userNoCreatedAt", Email: "userNoCreatedAt@mail.com", Username: "userNoCreatedAt", }, - UserPassword: models.NewUserPassword(""), + Password: models.UserPassword{ + Hash: "2bb80d537b1da3e38bd30361aa855686bde0eacd7162fef6a25fe97bf527a25b", + }, } userWithCreatedAt := models.User{ - ID: "userWithCreatedID", - Namespaces: 0, - Confirmed: false, - CreatedAt: timePast, - LastLogin: timeNow, + ID: "userWithCreatedID", + CreatedAt: timePast, + LastLogin: timeNow, UserData: models.UserData{ Name: "userWithCreatedAt", Email: "userWithCreatedAt@mail.com", Username: "userWithCreatedAt", }, - UserPassword: models.NewUserPassword(""), + Password: models.UserPassword{ + Hash: "2bb80d537b1da3e38bd30361aa855686bde0eacd7162fef6a25fe97bf527a25b", + }, } - _, err := db.Client().Database("test").Collection("users").InsertOne(context.TODO(), userNoCreatedAt) + _, err := c.Database("test").Collection("users").InsertOne(context.TODO(), userNoCreatedAt) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("users").InsertOne(context.TODO(), userWithCreatedAt) + _, err = 
c.Database("test").Collection("users").InsertOne(context.TODO(), userWithCreatedAt) assert.NoError(t, err) - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err = migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) + err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) cases := []struct { @@ -80,7 +77,7 @@ func TestMigration38(t *testing.T) { t.Helper() var userMigrated *models.User - err = db.Client().Database("test").Collection("users").FindOne(context.TODO(), bson.D{{"username", userNoCreatedAt.Username}}).Decode(&userMigrated) + err = c.Database("test").Collection("users").FindOne(context.TODO(), bson.D{{"username", userNoCreatedAt.Username}}).Decode(&userMigrated) assert.NoError(t, err) assert.Equal(t, Expected{CreatedAt: convertDate(userNoCreatedAt.LastLogin), LastLogin: convertDate(userNoCreatedAt.LastLogin)}, @@ -94,7 +91,7 @@ func TestMigration38(t *testing.T) { t.Helper() var userMigrated *models.User - err = db.Client().Database("test").Collection("users").FindOne(context.TODO(), bson.D{{"username", userWithCreatedAt.Username}}).Decode(&userMigrated) + err = c.Database("test").Collection("users").FindOne(context.TODO(), bson.D{{"username", userWithCreatedAt.Username}}).Decode(&userMigrated) assert.NoError(t, err) assert.Equal(t, Expected{CreatedAt: convertDate(userWithCreatedAt.CreatedAt), LastLogin: convertDate(userWithCreatedAt.LastLogin)}, diff --git a/api/store/mongo/migrations/migration_39.go b/api/store/mongo/migrations/migration_39.go index caf586f3f29..d3ceb48500b 100644 --- a/api/store/mongo/migrations/migration_39.go +++ b/api/store/mongo/migrations/migration_39.go @@ -12,22 +12,22 @@ import ( var migration39 = migrate.Migration{ Version: 39, Description: "remove online index from devices collection", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { 
logrus.WithFields(logrus.Fields{ "component": "migration", "version": 39, "action": "Up", }).Info("Applying migration") - if _, err := db.Collection("devices").Indexes().DropOne(context.TODO(), "online_1"); err != nil { + if _, err := db.Collection("devices").Indexes().DropOne(ctx, "online_1"); err != nil { return err } - _, err := db.Collection("devices").UpdateMany(context.TODO(), bson.M{}, bson.M{"$unset": bson.M{"online": nil}}) + _, err := db.Collection("devices").UpdateMany(ctx, bson.M{}, bson.M{"$unset": bson.M{"online": nil}}) return err - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 39, @@ -38,8 +38,8 @@ var migration39 = migrate.Migration{ Keys: bson.D{{"online", 1}}, } - _, err := db.Collection("devices").Indexes().CreateOne(context.TODO(), indexModel) + _, err := db.Collection("devices").Indexes().CreateOne(ctx, indexModel) return err - }, + }), } diff --git a/api/store/mongo/migrations/migration_39_test.go b/api/store/mongo/migrations/migration_39_test.go index d4cbc92c095..62b7aaa9f23 100644 --- a/api/store/mongo/migrations/migration_39_test.go +++ b/api/store/mongo/migrations/migration_39_test.go @@ -1,27 +1,25 @@ package migrations import ( + "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" ) func TestMigration39(t *testing.T) { - logrus.Info("Testing Migration 39") - - db := dbtest.DBServer{} - defer db.Stop() + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) migrations := GenerateMigrations()[:39] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) 
+ err := migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - version, _, err := migrates.Version() + version, _, err := migrates.Version(context.Background()) assert.NoError(t, err) assert.Equal(t, uint64(39), version) } diff --git a/api/store/mongo/migrations/migration_3_test.go b/api/store/mongo/migrations/migration_3_test.go index 5bbd5a20a34..7e450256b4c 100644 --- a/api/store/mongo/migrations/migration_3_test.go +++ b/api/store/mongo/migrations/migration_3_test.go @@ -4,19 +4,16 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration3(t *testing.T) { - db := dbtest.DBServer{} - defer db.Stop() - - logrus.Info("Testing Migration 3 - Test if the column attributes was renamed to info") + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) type Device struct { Attributes *models.DeviceInfo `json:"attributes"` @@ -28,18 +25,18 @@ func TestMigration3(t *testing.T) { }, } - _, err := db.Client().Database("test").Collection("devices").InsertOne(context.TODO(), device) + _, err := c.Database("test").Collection("devices").InsertOne(context.TODO(), device) assert.NoError(t, err) var afterMigrateDevice *models.Session - err = db.Client().Database("test").Collection("devices").FindOne(context.TODO(), bson.M{"attributes": &models.DeviceInfo{ID: "1"}}).Decode(&afterMigrateDevice) + err = c.Database("test").Collection("devices").FindOne(context.TODO(), bson.M{"attributes": &models.DeviceInfo{ID: "1"}}).Decode(&afterMigrateDevice) assert.NoError(t, err) - migrates := migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:3]...) - err = migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:3]...) 
+ err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) var migratedDevice *models.Device - err = db.Client().Database("test").Collection("devices").FindOne(context.TODO(), bson.M{"info": &models.DeviceInfo{ID: "1"}}).Decode(&migratedDevice) + err = c.Database("test").Collection("devices").FindOne(context.TODO(), bson.M{"info": &models.DeviceInfo{ID: "1"}}).Decode(&migratedDevice) assert.NoError(t, err) } diff --git a/api/store/mongo/migrations/migration_4.go b/api/store/mongo/migrations/migration_4.go index b1e29a9d992..a5ef1b220e0 100644 --- a/api/store/mongo/migrations/migration_4.go +++ b/api/store/mongo/migrations/migration_4.go @@ -1,6 +1,8 @@ package migrations import ( + "context" + "github.com/sirupsen/logrus" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/mongo" @@ -9,7 +11,7 @@ import ( var migration4 = migrate.Migration{ Version: 4, Description: "Rename the column version to info.version", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 4, @@ -17,8 +19,8 @@ var migration4 = migrate.Migration{ }).Info("Applying migration") return renameField(db, "devices", "version", "info.version") - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 4, @@ -26,5 +28,5 @@ var migration4 = migrate.Migration{ }).Info("Applying migration") return renameField(db, "devices", "info.version", "version") - }, + }), } diff --git a/api/store/mongo/migrations/migration_40.go b/api/store/mongo/migrations/migration_40.go index ec85af53287..149d2f1952c 100644 --- a/api/store/mongo/migrations/migration_40.go +++ b/api/store/mongo/migrations/migration_40.go @@ -13,14 +13,14 @@ import ( var migration40 = migrate.Migration{ Version: 40, 
Description: "remove online index from devices collection", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 40, "action": "Up", }).Info("Applying migration") - if _, err := db.Collection("connected_devices").Indexes().DropOne(context.TODO(), "last_seen"); err != nil { + if _, err := db.Collection("connected_devices").Indexes().DropOne(ctx, "last_seen"); err != nil { return err } @@ -28,20 +28,20 @@ var migration40 = migrate.Migration{ Keys: bson.D{{"last_seen", 1}}, Options: options.Index().SetName("last_seen").SetExpireAfterSeconds(60), } - if _, err := db.Collection("connected_devices").Indexes().CreateOne(context.TODO(), mod); err != nil { + if _, err := db.Collection("connected_devices").Indexes().CreateOne(ctx, mod); err != nil { return err } return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 40, "action": "Down", }).Info("Applying migration") - if _, err := db.Collection("connected_devices").Indexes().DropOne(context.TODO(), "last_seen"); err != nil { + if _, err := db.Collection("connected_devices").Indexes().DropOne(ctx, "last_seen"); err != nil { return err } @@ -49,10 +49,10 @@ var migration40 = migrate.Migration{ Keys: bson.D{{"last_seen", 1}}, Options: options.Index().SetName("last_seen").SetExpireAfterSeconds(30), } - if _, err := db.Collection("connected_devices").Indexes().CreateOne(context.TODO(), mod); err != nil { + if _, err := db.Collection("connected_devices").Indexes().CreateOne(ctx, mod); err != nil { return err } return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_40_test.go b/api/store/mongo/migrations/migration_40_test.go index 3d8d16c8a53..a249c469f3b 100644 --- a/api/store/mongo/migrations/migration_40_test.go 
+++ b/api/store/mongo/migrations/migration_40_test.go @@ -4,8 +4,6 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" @@ -14,22 +12,6 @@ import ( ) func TestMigration40(t *testing.T) { - logrus.Info("Testing Migration 40") - - db := dbtest.DBServer{} - defer db.Stop() - - oldIndex := mongo.IndexModel{ - Keys: bson.D{{"last_seen", 1}}, - Options: options.Index().SetName("last_seen").SetExpireAfterSeconds(30), - } - newIndex := mongo.IndexModel{ - Keys: bson.D{{"last_seen", 1}}, - Options: options.Index().SetName("last_seen").SetExpireAfterSeconds(30), - } - _, err := db.Client().Database("test").Collection("connected_devices").Indexes().CreateOne(context.TODO(), oldIndex) - assert.NoError(t, err) - cases := []struct { description string Test func(t *testing.T) @@ -39,19 +21,30 @@ func TestMigration40(t *testing.T) { func(t *testing.T) { t.Helper() - migrations := GenerateMigrations()[39:40] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err = migrates.Up(migrate.AllAvailable) + oldIndex := mongo.IndexModel{ + Keys: bson.D{{"last_seen", 1}}, + Options: options.Index().SetName("last_seen").SetExpireAfterSeconds(30), + } + + newIndex := mongo.IndexModel{ + Keys: bson.D{{"last_seen", 1}}, + Options: options.Index().SetName("last_seen").SetExpireAfterSeconds(30), + } + _, err := c.Database("test").Collection("connected_devices").Indexes().CreateOne(context.TODO(), oldIndex) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("connected_devices").Indexes().DropOne(context.TODO(), "last_seen") + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[39:40]...) 
+ assert.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) + + _, err = c.Database("test").Collection("connected_devices").Indexes().DropOne(context.TODO(), "last_seen") assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("connected_devices").Indexes().CreateOne(context.TODO(), newIndex) + _, err = c.Database("test").Collection("connected_devices").Indexes().CreateOne(context.TODO(), newIndex) assert.NoError(t, err) const Expected = 1 - list, err := db.Client().Database("test").Collection("connected_devices").Indexes().ListSpecifications(context.TODO()) + list, err := c.Database("test").Collection("connected_devices").Indexes().ListSpecifications(context.TODO()) assert.NoError(t, err) assert.Equal(t, newIndex.Options.ExpireAfterSeconds, list[Expected].ExpireAfterSeconds) @@ -62,19 +55,26 @@ func TestMigration40(t *testing.T) { func(t *testing.T) { t.Helper() - migrations := GenerateMigrations()[39:40] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err = migrates.Down(migrate.AllAvailable) + oldIndex := mongo.IndexModel{ + Keys: bson.D{{"last_seen", 1}}, + Options: options.Index().SetName("last_seen").SetExpireAfterSeconds(30), + } + + _, err := c.Database("test").Collection("connected_devices").Indexes().CreateOne(context.TODO(), oldIndex) + assert.NoError(t, err) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[39:40]...) 
+ assert.NoError(t, migrates.Down(context.Background(), migrate.AllAvailable)) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("connected_devices").Indexes().DropOne(context.TODO(), "last_seen") + _, err = c.Database("test").Collection("connected_devices").Indexes().DropOne(context.TODO(), "last_seen") assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("connected_devices").Indexes().CreateOne(context.TODO(), oldIndex) + _, err = c.Database("test").Collection("connected_devices").Indexes().CreateOne(context.TODO(), oldIndex) assert.NoError(t, err) const Expected = 1 - list, err := db.Client().Database("test").Collection("connected_devices").Indexes().ListSpecifications(context.TODO()) + list, err := c.Database("test").Collection("connected_devices").Indexes().ListSpecifications(context.TODO()) assert.NoError(t, err) assert.Equal(t, oldIndex.Options.ExpireAfterSeconds, list[Expected].ExpireAfterSeconds) @@ -84,6 +84,11 @@ func TestMigration40(t *testing.T) { for _, test := range cases { tc := test - t.Run(tc.description, tc.Test) + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + tc.Test(t) + }) } } diff --git a/api/store/mongo/migrations/migration_41.go b/api/store/mongo/migrations/migration_41.go index 0e3c1ac034e..22d566b54bc 100644 --- a/api/store/mongo/migrations/migration_41.go +++ b/api/store/mongo/migrations/migration_41.go @@ -13,14 +13,14 @@ import ( var migration41 = migrate.Migration{ Version: 41, Description: "update online index from devices collection", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 41, "action": "Up", }).Info("Applying migration") - if _, err := db.Collection("connected_devices").Indexes().DropOne(context.TODO(), "last_seen"); err != nil { + if _, err := 
db.Collection("connected_devices").Indexes().DropOne(ctx, "last_seen"); err != nil { return err } @@ -28,20 +28,20 @@ var migration41 = migrate.Migration{ Keys: bson.D{{"last_seen", 1}}, Options: options.Index().SetName("last_seen").SetExpireAfterSeconds(120), } - if _, err := db.Collection("connected_devices").Indexes().CreateOne(context.TODO(), mod); err != nil { + if _, err := db.Collection("connected_devices").Indexes().CreateOne(ctx, mod); err != nil { return err } return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 41, "action": "Down", }).Info("Applying migration") - if _, err := db.Collection("connected_devices").Indexes().DropOne(context.TODO(), "last_seen"); err != nil { + if _, err := db.Collection("connected_devices").Indexes().DropOne(ctx, "last_seen"); err != nil { return err } @@ -49,10 +49,10 @@ var migration41 = migrate.Migration{ Keys: bson.D{{"last_seen", 1}}, Options: options.Index().SetName("last_seen").SetExpireAfterSeconds(60), } - if _, err := db.Collection("connected_devices").Indexes().CreateOne(context.TODO(), mod); err != nil { + if _, err := db.Collection("connected_devices").Indexes().CreateOne(ctx, mod); err != nil { return err } return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_41_test.go b/api/store/mongo/migrations/migration_41_test.go index e70ec763b44..37d25e1c6df 100644 --- a/api/store/mongo/migrations/migration_41_test.go +++ b/api/store/mongo/migrations/migration_41_test.go @@ -4,8 +4,6 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" @@ -14,22 +12,6 @@ import ( ) func TestMigration41(t *testing.T) { - logrus.Info("Testing Migration 41") - - db := 
dbtest.DBServer{} - defer db.Stop() - - oldIndex := mongo.IndexModel{ - Keys: bson.D{{"last_seen", 1}}, - Options: options.Index().SetName("last_seen").SetExpireAfterSeconds(60), - } - newIndex := mongo.IndexModel{ - Keys: bson.D{{"last_seen", 1}}, - Options: options.Index().SetName("last_seen").SetExpireAfterSeconds(60), - } - _, err := db.Client().Database("test").Collection("connected_devices").Indexes().CreateOne(context.TODO(), oldIndex) - assert.NoError(t, err) - cases := []struct { description string Test func(t *testing.T) @@ -39,19 +21,28 @@ func TestMigration41(t *testing.T) { func(t *testing.T) { t.Helper() - migrations := GenerateMigrations()[40:41] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err = migrates.Up(migrate.AllAvailable) - + oldIndex := mongo.IndexModel{ + Keys: bson.D{{"last_seen", 1}}, + Options: options.Index().SetName("last_seen").SetExpireAfterSeconds(60), + } + newIndex := mongo.IndexModel{ + Keys: bson.D{{"last_seen", 1}}, + Options: options.Index().SetName("last_seen").SetExpireAfterSeconds(60), + } + _, err := c.Database("test").Collection("connected_devices").Indexes().CreateOne(context.TODO(), oldIndex) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("connected_devices").Indexes().DropOne(context.TODO(), "last_seen") + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[40:41]...) 
+ assert.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) + + _, err = c.Database("test").Collection("connected_devices").Indexes().DropOne(context.TODO(), "last_seen") assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("connected_devices").Indexes().CreateOne(context.TODO(), newIndex) + _, err = c.Database("test").Collection("connected_devices").Indexes().CreateOne(context.TODO(), newIndex) assert.NoError(t, err) const Expected = 1 - list, err := db.Client().Database("test").Collection("connected_devices").Indexes().ListSpecifications(context.TODO()) + list, err := c.Database("test").Collection("connected_devices").Indexes().ListSpecifications(context.TODO()) assert.NoError(t, err) assert.Equal(t, newIndex.Options.ExpireAfterSeconds, list[Expected].ExpireAfterSeconds) @@ -62,19 +53,25 @@ func TestMigration41(t *testing.T) { func(t *testing.T) { t.Helper() - migrations := GenerateMigrations()[40:41] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err = migrates.Down(migrate.AllAvailable) + oldIndex := mongo.IndexModel{ + Keys: bson.D{{"last_seen", 1}}, + Options: options.Index().SetName("last_seen").SetExpireAfterSeconds(60), + } + _, err := c.Database("test").Collection("connected_devices").Indexes().CreateOne(context.TODO(), oldIndex) + assert.NoError(t, err) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[40:41]...) 
+ assert.NoError(t, migrates.Down(context.Background(), migrate.AllAvailable)) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("connected_devices").Indexes().DropOne(context.TODO(), "last_seen") + _, err = c.Database("test").Collection("connected_devices").Indexes().DropOne(context.TODO(), "last_seen") assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("connected_devices").Indexes().CreateOne(context.TODO(), oldIndex) + _, err = c.Database("test").Collection("connected_devices").Indexes().CreateOne(context.TODO(), oldIndex) assert.NoError(t, err) const Expected = 1 - list, err := db.Client().Database("test").Collection("connected_devices").Indexes().ListSpecifications(context.TODO()) + list, err := c.Database("test").Collection("connected_devices").Indexes().ListSpecifications(context.TODO()) assert.NoError(t, err) assert.Equal(t, oldIndex.Options.ExpireAfterSeconds, list[Expected].ExpireAfterSeconds) @@ -84,6 +81,11 @@ func TestMigration41(t *testing.T) { for _, test := range cases { tc := test - t.Run(tc.description, tc.Test) + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + tc.Test(t) + }) } } diff --git a/api/store/mongo/migrations/migration_42.go b/api/store/mongo/migrations/migration_42.go index 13d7fc1b678..31ac404144f 100644 --- a/api/store/mongo/migrations/migration_42.go +++ b/api/store/mongo/migrations/migration_42.go @@ -12,14 +12,14 @@ import ( var migration42 = migrate.Migration{ Version: 42, Description: "change hostname to filter hostname", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 42, "action": "Up", }).Info("Applying migration") - _, err := db.Collection("public_keys").Aggregate(context.TODO(), + _, err := db.Collection("public_keys").Aggregate(ctx, mongo.Pipeline{ { {"$match", bson.M{}}, @@ -40,15 
+40,15 @@ var migration42 = migrate.Migration{ } return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 42, "action": "Down", }).Info("Applying migration") - _, err := db.Collection("public_keys").Aggregate(context.TODO(), + _, err := db.Collection("public_keys").Aggregate(ctx, mongo.Pipeline{ { {"$match", bson.M{}}, @@ -69,5 +69,5 @@ var migration42 = migrate.Migration{ } return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_42_test.go b/api/store/mongo/migrations/migration_42_test.go index cc55e87ad82..b284aa0dbeb 100644 --- a/api/store/mongo/migrations/migration_42_test.go +++ b/api/store/mongo/migrations/migration_42_test.go @@ -5,20 +5,13 @@ import ( "testing" "time" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration42(t *testing.T) { - logrus.Info("Testing Migration 42") - - db := dbtest.DBServer{} - defer db.Stop() - type PublicKeyFields struct { Name string `json:"name"` Username string `json:"username" bson:"username,omitempty" validate:"regexp"` @@ -33,31 +26,6 @@ func TestMigration42(t *testing.T) { PublicKeyFields `bson:",inline"` } - keyOld := &PublicKey{ - Fingerprint: "fingerprint", - TenantID: "tenant", - PublicKeyFields: PublicKeyFields{ - Name: "key", - Username: ".*", - Hostname: ".*", - }, - } - - keyNew := &models.PublicKey{ - Fingerprint: "fingerprint", - TenantID: "tenant", - PublicKeyFields: models.PublicKeyFields{ - Name: "key", - Username: ".*", - Filter: models.PublicKeyFilter{ - Hostname: ".*", - }, - }, - } - - _, err := db.Client().Database("test").Collection("public_keys").InsertOne(context.TODO(), keyOld) - assert.NoError(t, err) - cases := 
[]struct { description string Test func(t *testing.T) @@ -67,13 +35,36 @@ func TestMigration42(t *testing.T) { func(t *testing.T) { t.Helper() - migrations := GenerateMigrations()[41:42] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) + keyOld := &PublicKey{ + Fingerprint: "fingerprint", + TenantID: "tenant", + PublicKeyFields: PublicKeyFields{ + Name: "key", + Username: ".*", + Hostname: ".*", + }, + } + + keyNew := &models.PublicKey{ + Fingerprint: "fingerprint", + TenantID: "tenant", + PublicKeyFields: models.PublicKeyFields{ + Name: "key", + Username: ".*", + Filter: models.PublicKeyFilter{ + Hostname: ".*", + }, + }, + } + + _, err := c.Database("test").Collection("public_keys").InsertOne(context.TODO(), keyOld) assert.NoError(t, err) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[41:42]...) + assert.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) + key := new(models.PublicKey) - result := db.Client().Database("test").Collection("public_keys").FindOne(context.TODO(), bson.M{"tenant_id": keyOld.TenantID}) + result := c.Database("test").Collection("public_keys").FindOne(context.TODO(), bson.M{"tenant_id": keyOld.TenantID}) assert.NoError(t, result.Err()) err = result.Decode(key) @@ -87,13 +78,36 @@ func TestMigration42(t *testing.T) { func(t *testing.T) { t.Helper() - migrations := GenerateMigrations()[41:42] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) 
- err := migrates.Down(migrate.AllAvailable) + keyOld := &PublicKey{ + Fingerprint: "fingerprint", + TenantID: "tenant", + PublicKeyFields: PublicKeyFields{ + Name: "key", + Username: ".*", + Hostname: ".*", + }, + } + + keyNew := &models.PublicKey{ + Fingerprint: "fingerprint", + TenantID: "tenant", + PublicKeyFields: models.PublicKeyFields{ + Name: "key", + Username: ".*", + Filter: models.PublicKeyFilter{ + Hostname: ".*", + }, + }, + } + + _, err := c.Database("test").Collection("public_keys").InsertOne(context.TODO(), keyOld) assert.NoError(t, err) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[41:42]...) + assert.NoError(t, migrates.Down(context.Background(), migrate.AllAvailable)) + key := new(PublicKey) - result := db.Client().Database("test").Collection("public_keys").FindOne(context.TODO(), bson.M{"tenant_id": keyNew.TenantID}) + result := c.Database("test").Collection("public_keys").FindOne(context.TODO(), bson.M{"tenant_id": keyNew.TenantID}) assert.NoError(t, result.Err()) err = result.Decode(key) @@ -106,6 +120,11 @@ func TestMigration42(t *testing.T) { for _, test := range cases { tc := test - t.Run(tc.description, tc.Test) + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + tc.Test(t) + }) } } diff --git a/api/store/mongo/migrations/migration_43.go b/api/store/mongo/migrations/migration_43.go index 14aa3e582cd..1ccb3529a2e 100644 --- a/api/store/mongo/migrations/migration_43.go +++ b/api/store/mongo/migrations/migration_43.go @@ -12,14 +12,14 @@ import ( var migration43 = migrate.Migration{ Version: 43, Description: "add tags field to firewall_rules collection", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 43, "action": "Up", }).Info("Applying migration") - _, err := db.Collection("firewall_rules").Aggregate(context.TODO(), + 
_, err := db.Collection("firewall_rules").Aggregate(ctx, mongo.Pipeline{ { {"$match", bson.M{}}, @@ -40,15 +40,15 @@ var migration43 = migrate.Migration{ } return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 43, "action": "Down", }).Info("Applying migration") - _, err := db.Collection("firewall_rules").Aggregate(context.TODO(), + _, err := db.Collection("firewall_rules").Aggregate(ctx, mongo.Pipeline{ { {"$match", bson.M{}}, @@ -69,5 +69,5 @@ var migration43 = migrate.Migration{ } return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_43_test.go b/api/store/mongo/migrations/migration_43_test.go index 1a7352ff1c4..3b9d0dc9b67 100644 --- a/api/store/mongo/migrations/migration_43_test.go +++ b/api/store/mongo/migrations/migration_43_test.go @@ -4,20 +4,13 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration43(t *testing.T) { - logrus.Info("Testing Migration 43") - - db := dbtest.DBServer{} - defer db.Stop() - type FirewallRuleFields struct { Priority int `json:"priority"` Action string `json:"action" validate:"required,oneof=allow deny"` @@ -33,27 +26,6 @@ func TestMigration43(t *testing.T) { FirewallRuleFields `bson:",inline"` } - ruleOld := &FirewallRule{ - ID: "ruleID", - TenantID: "tenant", - FirewallRuleFields: FirewallRuleFields{ - Hostname: ".*", - }, - } - - ruleNew := &models.FirewallRule{ - ID: "ruleID", - TenantID: "tenant", - FirewallRuleFields: models.FirewallRuleFields{ - Filter: models.FirewallFilter{ - Hostname: ".*", - }, - }, - } - - _, err := db.Client().Database("test").Collection("firewall_rules").InsertOne(context.TODO(), 
ruleOld) - assert.NoError(t, err) - cases := []struct { description string Test func(t *testing.T) @@ -63,13 +35,32 @@ func TestMigration43(t *testing.T) { func(t *testing.T) { t.Helper() - migrations := GenerateMigrations()[42:43] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) + ruleOld := &FirewallRule{ + ID: "ruleID", + TenantID: "tenant", + FirewallRuleFields: FirewallRuleFields{ + Hostname: ".*", + }, + } + + ruleNew := &models.FirewallRule{ + ID: "ruleID", + TenantID: "tenant", + FirewallRuleFields: models.FirewallRuleFields{ + Filter: models.FirewallFilter{ + Hostname: ".*", + }, + }, + } + + _, err := c.Database("test").Collection("firewall_rules").InsertOne(context.TODO(), ruleOld) assert.NoError(t, err) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[42:43]...) + assert.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) + rule := new(models.FirewallRule) - result := db.Client().Database("test").Collection("firewall_rules").FindOne(context.TODO(), bson.M{"tenant_id": ruleOld.TenantID}) + result := c.Database("test").Collection("firewall_rules").FindOne(context.TODO(), bson.M{"tenant_id": ruleOld.TenantID}) assert.NoError(t, result.Err()) err = result.Decode(rule) @@ -83,13 +74,32 @@ func TestMigration43(t *testing.T) { func(t *testing.T) { t.Helper() - migrations := GenerateMigrations()[42:43] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) 
- err := migrates.Down(migrate.AllAvailable) + ruleOld := &FirewallRule{ + ID: "ruleID", + TenantID: "tenant", + FirewallRuleFields: FirewallRuleFields{ + Hostname: ".*", + }, + } + + ruleNew := &models.FirewallRule{ + ID: "ruleID", + TenantID: "tenant", + FirewallRuleFields: models.FirewallRuleFields{ + Filter: models.FirewallFilter{ + Hostname: ".*", + }, + }, + } + + _, err := c.Database("test").Collection("firewall_rules").InsertOne(context.TODO(), ruleOld) assert.NoError(t, err) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[42:43]...) + assert.NoError(t, migrates.Down(context.Background(), migrate.AllAvailable)) + rule := new(FirewallRule) - result := db.Client().Database("test").Collection("firewall_rules").FindOne(context.TODO(), bson.M{"tenant_id": ruleNew.TenantID}) + result := c.Database("test").Collection("firewall_rules").FindOne(context.TODO(), bson.M{"tenant_id": ruleNew.TenantID}) assert.NoError(t, result.Err()) err = result.Decode(rule) @@ -102,6 +112,11 @@ func TestMigration43(t *testing.T) { for _, test := range cases { tc := test - t.Run(tc.description, tc.Test) + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + tc.Test(t) + }) } } diff --git a/api/store/mongo/migrations/migration_44.go b/api/store/mongo/migrations/migration_44.go index 57f2eb91cb5..91909f8bf33 100644 --- a/api/store/mongo/migrations/migration_44.go +++ b/api/store/mongo/migrations/migration_44.go @@ -12,14 +12,14 @@ import ( var migration44 = migrate.Migration{ Version: 44, Description: "remove duplicated tags on public keys", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 44, "action": "Up", }).Info("Applying migration") - _, err := db.Collection("public_keys").Aggregate(context.TODO(), + _, err := db.Collection("public_keys").Aggregate(ctx, 
mongo.Pipeline{ { {"$match", bson.M{"filter.tags": bson.M{"$exists": true}}}, @@ -52,8 +52,8 @@ var migration44 = migrate.Migration{ } return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 44, @@ -61,5 +61,5 @@ var migration44 = migrate.Migration{ }).Info("Applying migration") return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_44_test.go b/api/store/mongo/migrations/migration_44_test.go index a0344042f2c..e35ab918370 100644 --- a/api/store/mongo/migrations/migration_44_test.go +++ b/api/store/mongo/migrations/migration_44_test.go @@ -4,76 +4,33 @@ import ( "context" "sort" "testing" + "time" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" - "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration44(t *testing.T) { - logrus.Info("Testing Migration 44") - - db := dbtest.DBServer{} - defer db.Stop() - - keyTagDuplicated := &models.PublicKey{ - Fingerprint: "fingerprint", - TenantID: "tenant", - PublicKeyFields: models.PublicKeyFields{ - Name: "key", - Username: ".*", - Filter: models.PublicKeyFilter{ - Tags: []string{"tag1", "tag2", "tag2"}, - }, - }, - } - - keyTagWithoutDuplication := &models.PublicKey{ - Fingerprint: "fingerprint", - TenantID: "tenant", - PublicKeyFields: models.PublicKeyFields{ - Name: "key", - Username: ".*", - Filter: models.PublicKeyFilter{ - Tags: []string{"tag1", "tag2"}, - }, - }, + type PublicKeyFilter struct { + Hostname string `json:"hostname,omitempty" bson:"hostname,omitempty" validate:"required_without=Tags,excluded_with=Tags,regexp"` + Tags []string `json:"tags,omitempty" bson:"tags,omitempty" 
validate:"required_without=Hostname,excluded_with=Hostname,max=3,unique,dive,min=3,max=255,alphanum,ascii,excludes=/@&:"` } - keyTagNoDuplicated := &models.PublicKey{ - Fingerprint: "fingerprint1", - TenantID: "tenant1", - PublicKeyFields: models.PublicKeyFields{ - Name: "key1", - Username: ".*", - Filter: models.PublicKeyFilter{ - Tags: []string{"tag1", "tag2", "tag3"}, - }, - }, + type PublicKeyFields struct { + Name string `json:"name"` + Username string `json:"username" bson:"username" validate:"regexp"` + Filter PublicKeyFilter `json:"filter" bson:"filter" validate:"required"` } - keyHostname := &models.PublicKey{ - Fingerprint: "fingerprint2", - TenantID: "tenant2", - PublicKeyFields: models.PublicKeyFields{ - Name: "key2", - Username: ".*", - Filter: models.PublicKeyFilter{ - Hostname: ".*", - }, - }, + type PublicKey struct { + Data []byte `json:"data"` + Fingerprint string `json:"fingerprint"` + CreatedAt time.Time `json:"created_at" bson:"created_at"` + TenantID string `json:"tenant_id" bson:"tenant_id"` + PublicKeyFields `bson:",inline"` } - _, err := db.Client().Database("test").Collection("public_keys").InsertOne(context.TODO(), keyTagDuplicated) - assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("public_keys").InsertOne(context.TODO(), keyTagNoDuplicated) - assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("public_keys").InsertOne(context.TODO(), keyHostname) - assert.NoError(t, err) - cases := []struct { description string Test func(t *testing.T) @@ -83,13 +40,66 @@ func TestMigration44(t *testing.T) { func(t *testing.T) { t.Helper() - migrations := GenerateMigrations()[43:44] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) 
- err := migrates.Up(migrate.AllAvailable) + keyTagDuplicated := &PublicKey{ + Fingerprint: "fingerprint", + TenantID: "tenant", + PublicKeyFields: PublicKeyFields{ + Name: "key", + Username: ".*", + Filter: PublicKeyFilter{ + Tags: []string{"tag1", "tag2", "tag2"}, + }, + }, + } + + keyTagWithoutDuplication := &PublicKey{ + Fingerprint: "fingerprint", + TenantID: "tenant", + PublicKeyFields: PublicKeyFields{ + Name: "key", + Username: ".*", + Filter: PublicKeyFilter{ + Tags: []string{"tag1", "tag2"}, + }, + }, + } + + keyTagNoDuplicated := &PublicKey{ + Fingerprint: "fingerprint1", + TenantID: "tenant1", + PublicKeyFields: PublicKeyFields{ + Name: "key1", + Username: ".*", + Filter: PublicKeyFilter{ + Tags: []string{"tag1", "tag2", "tag3"}, + }, + }, + } + + keyHostname := &PublicKey{ + Fingerprint: "fingerprint2", + TenantID: "tenant2", + PublicKeyFields: PublicKeyFields{ + Name: "key2", + Username: ".*", + Filter: PublicKeyFilter{ + Hostname: ".*", + }, + }, + } + + _, err := c.Database("test").Collection("public_keys").InsertOne(context.TODO(), keyTagDuplicated) + assert.NoError(t, err) + _, err = c.Database("test").Collection("public_keys").InsertOne(context.TODO(), keyTagNoDuplicated) + assert.NoError(t, err) + _, err = c.Database("test").Collection("public_keys").InsertOne(context.TODO(), keyHostname) assert.NoError(t, err) - key := new(models.PublicKey) - result := db.Client().Database("test").Collection("public_keys").FindOne(context.TODO(), bson.M{"tenant_id": keyTagDuplicated.TenantID}) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[43:44]...) 
+ assert.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) + + key := new(PublicKey) + result := c.Database("test").Collection("public_keys").FindOne(context.TODO(), bson.M{"tenant_id": keyTagDuplicated.TenantID}) assert.NoError(t, result.Err()) err = result.Decode(key) @@ -105,13 +115,54 @@ func TestMigration44(t *testing.T) { func(t *testing.T) { t.Helper() - migrations := GenerateMigrations()[43:44] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) + keyTagDuplicated := &PublicKey{ + Fingerprint: "fingerprint", + TenantID: "tenant", + PublicKeyFields: PublicKeyFields{ + Name: "key", + Username: ".*", + Filter: PublicKeyFilter{ + Tags: []string{"tag1", "tag2", "tag2"}, + }, + }, + } + + keyTagNoDuplicated := &PublicKey{ + Fingerprint: "fingerprint1", + TenantID: "tenant1", + PublicKeyFields: PublicKeyFields{ + Name: "key1", + Username: ".*", + Filter: PublicKeyFilter{ + Tags: []string{"tag1", "tag2", "tag3"}, + }, + }, + } + + keyHostname := &PublicKey{ + Fingerprint: "fingerprint2", + TenantID: "tenant2", + PublicKeyFields: PublicKeyFields{ + Name: "key2", + Username: ".*", + Filter: PublicKeyFilter{ + Hostname: ".*", + }, + }, + } + + _, err := c.Database("test").Collection("public_keys").InsertOne(context.TODO(), keyTagDuplicated) + assert.NoError(t, err) + _, err = c.Database("test").Collection("public_keys").InsertOne(context.TODO(), keyTagNoDuplicated) assert.NoError(t, err) + _, err = c.Database("test").Collection("public_keys").InsertOne(context.TODO(), keyHostname) + assert.NoError(t, err) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[43:44]...) 
+ assert.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) - key := new(models.PublicKey) - result := db.Client().Database("test").Collection("public_keys").FindOne(context.TODO(), bson.M{"tenant_id": keyTagNoDuplicated.TenantID}) + key := new(PublicKey) + result := c.Database("test").Collection("public_keys").FindOne(context.TODO(), bson.M{"tenant_id": keyTagNoDuplicated.TenantID}) assert.NoError(t, result.Err()) err = result.Decode(key) @@ -127,13 +178,54 @@ func TestMigration44(t *testing.T) { func(t *testing.T) { t.Helper() - migrations := GenerateMigrations()[43:44] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) + keyTagDuplicated := &PublicKey{ + Fingerprint: "fingerprint", + TenantID: "tenant", + PublicKeyFields: PublicKeyFields{ + Name: "key", + Username: ".*", + Filter: PublicKeyFilter{ + Tags: []string{"tag1", "tag2", "tag2"}, + }, + }, + } + + keyTagNoDuplicated := &PublicKey{ + Fingerprint: "fingerprint1", + TenantID: "tenant1", + PublicKeyFields: PublicKeyFields{ + Name: "key1", + Username: ".*", + Filter: PublicKeyFilter{ + Tags: []string{"tag1", "tag2", "tag3"}, + }, + }, + } + + keyHostname := &PublicKey{ + Fingerprint: "fingerprint2", + TenantID: "tenant2", + PublicKeyFields: PublicKeyFields{ + Name: "key2", + Username: ".*", + Filter: PublicKeyFilter{ + Hostname: ".*", + }, + }, + } + + _, err := c.Database("test").Collection("public_keys").InsertOne(context.TODO(), keyTagDuplicated) + assert.NoError(t, err) + _, err = c.Database("test").Collection("public_keys").InsertOne(context.TODO(), keyTagNoDuplicated) assert.NoError(t, err) + _, err = c.Database("test").Collection("public_keys").InsertOne(context.TODO(), keyHostname) + assert.NoError(t, err) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[43:44]...) 
+ assert.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) - key := new(models.PublicKey) - result := db.Client().Database("test").Collection("public_keys").FindOne(context.TODO(), bson.M{"tenant_id": keyHostname.TenantID}) + key := new(PublicKey) + result := c.Database("test").Collection("public_keys").FindOne(context.TODO(), bson.M{"tenant_id": keyHostname.TenantID}) assert.NoError(t, result.Err()) err = result.Decode(key) @@ -145,6 +237,11 @@ func TestMigration44(t *testing.T) { } for _, tc := range cases { - t.Run(tc.description, tc.Test) + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + tc.Test(t) + }) } } diff --git a/api/store/mongo/migrations/migration_45.go b/api/store/mongo/migrations/migration_45.go index 0e8fba73070..5b75e3ff2c6 100644 --- a/api/store/mongo/migrations/migration_45.go +++ b/api/store/mongo/migrations/migration_45.go @@ -12,14 +12,14 @@ import ( var migration45 = migrate.Migration{ Version: 45, Description: "remove duplicated tags on firewall rules", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 45, "action": "Up", }).Info("Applying migration") - _, err := db.Collection("firewall_rules").Aggregate(context.TODO(), + _, err := db.Collection("firewall_rules").Aggregate(ctx, mongo.Pipeline{ { {"$match", bson.M{"filter.tags": bson.M{"$exists": true}}}, @@ -52,8 +52,8 @@ var migration45 = migrate.Migration{ } return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 45, @@ -61,5 +61,5 @@ var migration45 = migrate.Migration{ }).Info("Applying migration") return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_45_test.go 
b/api/store/mongo/migrations/migration_45_test.go index c0cc578e666..4e6af62bdd2 100644 --- a/api/store/mongo/migrations/migration_45_test.go +++ b/api/store/mongo/migrations/migration_45_test.go @@ -5,20 +5,13 @@ import ( "sort" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration45(t *testing.T) { - logrus.Info("Testing Migration 45") - - db := dbtest.DBServer{} - defer db.Stop() - ruleTagDuplicated := &models.FirewallRule{ ID: "id", TenantID: "tenant", @@ -79,11 +72,11 @@ func TestMigration45(t *testing.T) { }, } - _, err := db.Client().Database("test").Collection("firewall_rules").InsertOne(context.TODO(), ruleTagDuplicated) + _, err := c.Database("test").Collection("firewall_rules").InsertOne(context.TODO(), ruleTagDuplicated) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("firewall_rules").InsertOne(context.TODO(), ruleTagNoDuplicated) + _, err = c.Database("test").Collection("firewall_rules").InsertOne(context.TODO(), ruleTagNoDuplicated) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("firewall_rules").InsertOne(context.TODO(), ruleHostname) + _, err = c.Database("test").Collection("firewall_rules").InsertOne(context.TODO(), ruleHostname) assert.NoError(t, err) cases := []struct { @@ -96,12 +89,12 @@ func TestMigration45(t *testing.T) { t.Helper() migrations := GenerateMigrations()[44:45] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) 
+ err := migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) rule := new(models.FirewallRule) - result := db.Client().Database("test").Collection("firewall_rules").FindOne(context.TODO(), bson.M{"tenant_id": ruleTagDuplicated.TenantID}) + result := c.Database("test").Collection("firewall_rules").FindOne(context.TODO(), bson.M{"tenant_id": ruleTagDuplicated.TenantID}) assert.NoError(t, result.Err()) err = result.Decode(rule) @@ -118,12 +111,12 @@ func TestMigration45(t *testing.T) { t.Helper() migrations := GenerateMigrations()[44:45] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) + err := migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) rule := new(models.FirewallRule) - result := db.Client().Database("test").Collection("firewall_rules").FindOne(context.TODO(), bson.M{"tenant_id": ruleTagNoDuplicated.TenantID}) + result := c.Database("test").Collection("firewall_rules").FindOne(context.TODO(), bson.M{"tenant_id": ruleTagNoDuplicated.TenantID}) assert.NoError(t, result.Err()) err = result.Decode(rule) @@ -140,12 +133,12 @@ func TestMigration45(t *testing.T) { t.Helper() migrations := GenerateMigrations()[44:45] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) 
+ err := migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) rule := new(models.FirewallRule) - result := db.Client().Database("test").Collection("firewall_rules").FindOne(ctx, bson.M{"tenant_id": ruleHostname.TenantID}) + result := c.Database("test").Collection("firewall_rules").FindOne(ctx, bson.M{"tenant_id": ruleHostname.TenantID}) assert.NoError(t, result.Err()) err = result.Decode(rule) @@ -157,6 +150,11 @@ } for _, tc := range cases { - t.Run(tc.description, tc.Test) + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + tc.Test(t) + }) } } diff --git a/api/store/mongo/migrations/migration_46.go b/api/store/mongo/migrations/migration_46.go index 07db7cced87..80a063cd83f 100644 --- a/api/store/mongo/migrations/migration_46.go +++ b/api/store/mongo/migrations/migration_46.go @@ -12,7 +12,7 @@ import ( var migration46 = migrate.Migration{ Version: 46, Description: "change public keys with empty username in favor of .* regexp", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 46, @@ -37,8 +37,8 @@ var migration46 = migrate.Migration{ } return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 46, @@ -63,5 +63,5 @@ var migration46 = migrate.Migration{ } return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_46_test.go b/api/store/mongo/migrations/migration_46_test.go index b86fb6af116..a5348588961 100644 --- a/api/store/mongo/migrations/migration_46_test.go +++ b/api/store/mongo/migrations/migration_46_test.go @@ -4,47 +4,32 @@ import ( "context" "sort" "testing" + "time" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" - 
"github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration46(t *testing.T) { - logrus.Info("Testing Migration 46") - - db := dbtest.DBServer{} - defer db.Stop() - - keyUsernameEmpty := &models.PublicKey{ - Fingerprint: "fingerprint", - TenantID: "tenant", - PublicKeyFields: models.PublicKeyFields{ - Name: "key", - Username: "", - Filter: models.PublicKeyFilter{ - Tags: []string{"tag1", "tag2", "tag3"}, - }, - }, + type PublicKeyFilter struct { + Hostname string `json:"hostname,omitempty" bson:"hostname,omitempty" validate:"required_without=Tags,excluded_with=Tags,regexp"` + Tags []string `json:"tags,omitempty" bson:"tags,omitempty" validate:"required_without=Hostname,excluded_with=Hostname,max=3,unique,dive,min=3,max=255,alphanum,ascii,excludes=/@&:"` } - keyUsernameRegexp := &models.PublicKey{ - Fingerprint: "fingerprint", - TenantID: "tenant", - PublicKeyFields: models.PublicKeyFields{ - Name: "key", - Username: ".*", - Filter: models.PublicKeyFilter{ - Tags: []string{"tag1", "tag2", "tag3"}, - }, - }, + type PublicKeyFields struct { + Name string `json:"name"` + Username string `json:"username" bson:"username" validate:"regexp"` + Filter PublicKeyFilter `json:"filter" bson:"filter" validate:"required"` } - _, err := db.Client().Database("test").Collection("public_keys").InsertOne(context.Background(), keyUsernameEmpty) - assert.NoError(t, err) + type PublicKey struct { + Data []byte `json:"data"` + Fingerprint string `json:"fingerprint"` + CreatedAt time.Time `json:"created_at" bson:"created_at"` + TenantID string `json:"tenant_id" bson:"tenant_id"` + PublicKeyFields `bson:",inline"` + } cases := []struct { description string @@ -55,13 +40,38 @@ func TestMigration46(t *testing.T) { func(t *testing.T) { t.Helper() - migrations := GenerateMigrations()[45:46] - migrates := 
migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) + keyUsernameEmpty := &PublicKey{ + Fingerprint: "fingerprint", + TenantID: "tenant", + PublicKeyFields: PublicKeyFields{ + Name: "key", + Username: "", + Filter: PublicKeyFilter{ + Tags: []string{"tag1", "tag2", "tag3"}, + }, + }, + } + + keyUsernameRegexp := &PublicKey{ + Fingerprint: "fingerprint", + TenantID: "tenant", + PublicKeyFields: PublicKeyFields{ + Name: "key", + Username: ".*", + Filter: PublicKeyFilter{ + Tags: []string{"tag1", "tag2", "tag3"}, + }, + }, + } + + _, err := c.Database("test").Collection("public_keys").InsertOne(context.Background(), keyUsernameEmpty) assert.NoError(t, err) - key := new(models.PublicKey) - result := db.Client().Database("test").Collection("public_keys").FindOne(context.Background(), bson.M{"tenant_id": keyUsernameEmpty.TenantID}) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[45:46]...) + assert.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) + + key := new(PublicKey) + result := c.Database("test").Collection("public_keys").FindOne(context.Background(), bson.M{"tenant_id": keyUsernameEmpty.TenantID}) assert.NoError(t, result.Err()) err = result.Decode(key) @@ -77,13 +87,38 @@ func TestMigration46(t *testing.T) { func(t *testing.T) { t.Helper() - migrations := GenerateMigrations()[45:46] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) 
- err := migrates.Down(migrate.AllAvailable) + keyUsernameEmpty := &PublicKey{ + Fingerprint: "fingerprint", + TenantID: "tenant", + PublicKeyFields: PublicKeyFields{ + Name: "key", + Username: "", + Filter: PublicKeyFilter{ + Tags: []string{"tag1", "tag2", "tag3"}, + }, + }, + } + + keyUsernameRegexp := &PublicKey{ + Fingerprint: "fingerprint", + TenantID: "tenant", + PublicKeyFields: PublicKeyFields{ + Name: "key", + Username: ".*", + Filter: PublicKeyFilter{ + Tags: []string{"tag1", "tag2", "tag3"}, + }, + }, + } + + _, err := c.Database("test").Collection("public_keys").InsertOne(context.Background(), keyUsernameEmpty) assert.NoError(t, err) - key := new(models.PublicKey) - result := db.Client().Database("test").Collection("public_keys").FindOne(context.Background(), bson.M{"tenant_id": keyUsernameRegexp.TenantID}) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[45:46]...) + assert.NoError(t, migrates.Down(context.Background(), migrate.AllAvailable)) + + key := new(PublicKey) + result := c.Database("test").Collection("public_keys").FindOne(context.Background(), bson.M{"tenant_id": keyUsernameRegexp.TenantID}) assert.NoError(t, result.Err()) err = result.Decode(key) @@ -95,6 +130,11 @@ func TestMigration46(t *testing.T) { } for _, tc := range cases { - t.Run(tc.description, tc.Test) + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + tc.Test(t) + }) } } diff --git a/api/store/mongo/migrations/migration_47.go b/api/store/mongo/migrations/migration_47.go index 410cb94848f..2432071f009 100644 --- a/api/store/mongo/migrations/migration_47.go +++ b/api/store/mongo/migrations/migration_47.go @@ -6,6 +6,7 @@ import ( "os" "github.com/shellhub-io/shellhub/pkg/geoip" + "github.com/shellhub-io/shellhub/pkg/geoip/geolite2" "github.com/shellhub-io/shellhub/pkg/models" "github.com/sirupsen/logrus" migrate "github.com/xakep666/mongo-migrate" @@ -16,18 +17,16 @@ import ( var migration47 = 
migrate.Migration{ Version: 47, Description: "", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 47, "action": "Up", }).Info("Applying migration up") - ctx := context.Background() - var locator geoip.Locator if os.Getenv("GEOIP") == "true" { - locator, _ = geoip.NewGeoLite2() + locator, _ = geolite2.NewLocator(ctx, geolite2.FetchFromLicenseKey(os.Getenv("MAXMIND_LICENSE"))) } else { locator = geoip.NewNullGeoLite() } @@ -55,8 +54,8 @@ var migration47 = migrate.Migration{ } return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 47, @@ -81,5 +80,5 @@ var migration47 = migrate.Migration{ } return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_47_test.go b/api/store/mongo/migrations/migration_47_test.go index 492bffe8379..e01592a01a1 100644 --- a/api/store/mongo/migrations/migration_47_test.go +++ b/api/store/mongo/migrations/migration_47_test.go @@ -5,28 +5,13 @@ import ( "os" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration47(t *testing.T) { - logrus.Info("Testing Migration 47") - - db := dbtest.DBServer{} - defer db.Stop() - - sessionWithoutPossition := &models.Session{ - UID: "test", - IPAddress: "201.182.197.68", - } - - _, err := db.Client().Database("test").Collection("sessions").InsertOne(context.Background(), sessionWithoutPossition) - assert.NoError(t, err) - cases := []struct { description string Test func(t *testing.T) @@ -36,13 +21,19 @@ func TestMigration47(t *testing.T) { func(t *testing.T) { t.Helper() - migrations 
:= GenerateMigrations()[46:47] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) + sessionWithoutPossition := &models.Session{ + UID: "test", + IPAddress: "201.182.197.68", + } + + _, err := c.Database("test").Collection("sessions").InsertOne(context.Background(), sessionWithoutPossition) assert.NoError(t, err) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[46:47]...) + assert.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) + key := new(models.Session) - result := db.Client().Database("test").Collection("sessions").FindOne(context.Background(), bson.M{"uid": sessionWithoutPossition.UID}) + result := c.Database("test").Collection("sessions").FindOne(context.Background(), bson.M{"uid": sessionWithoutPossition.UID}) assert.NoError(t, result.Err()) err = result.Decode(key) @@ -60,13 +51,19 @@ func TestMigration47(t *testing.T) { func(t *testing.T) { t.Helper() - migrations := GenerateMigrations()[46:47] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Down(migrate.AllAvailable) + sessionWithoutPossition := &models.Session{ + UID: "test", + IPAddress: "201.182.197.68", + } + + _, err := c.Database("test").Collection("sessions").InsertOne(context.Background(), sessionWithoutPossition) assert.NoError(t, err) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[46:47]...) 
+ assert.NoError(t, migrates.Down(context.Background(), migrate.AllAvailable)) + key := new(models.Session) - result := db.Client().Database("test").Collection("sessions").FindOne(context.Background(), bson.M{"uid": sessionWithoutPossition.UID}) + result := c.Database("test").Collection("sessions").FindOne(context.Background(), bson.M{"uid": sessionWithoutPossition.UID}) assert.NoError(t, result.Err()) err = result.Decode(key) @@ -78,6 +75,11 @@ func TestMigration47(t *testing.T) { } for _, tc := range cases { - t.Run(tc.description, tc.Test) + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + tc.Test(t) + }) } } diff --git a/api/store/mongo/migrations/migration_48.go b/api/store/mongo/migrations/migration_48.go index 81a65a977a2..d9aa21daf94 100644 --- a/api/store/mongo/migrations/migration_48.go +++ b/api/store/mongo/migrations/migration_48.go @@ -76,7 +76,7 @@ func invertFirewallRulePriority(db *mongo.Database) error { var migration48 = migrate.Migration{ Version: 48, Description: "invert Firewall priority", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 48, @@ -84,8 +84,8 @@ var migration48 = migrate.Migration{ }).Info("Applying migration up") return invertFirewallRulePriority(db) - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 48, @@ -93,5 +93,5 @@ var migration48 = migrate.Migration{ }).Info("Applying migration down") return invertFirewallRulePriority(db) - }, + }), } diff --git a/api/store/mongo/migrations/migration_48_test.go b/api/store/mongo/migrations/migration_48_test.go index b8a5fbfd76a..f4b759c1cb2 100644 --- a/api/store/mongo/migrations/migration_48_test.go +++ 
b/api/store/mongo/migrations/migration_48_test.go @@ -4,56 +4,13 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration48(t *testing.T) { - logrus.Info("Testing Migration 48") - - ctx := context.Background() - - db := dbtest.DBServer{} - defer db.Stop() - - namespace := models.Namespace{ - TenantID: "tenant", - } - - rule0 := models.FirewallRule{ - TenantID: namespace.TenantID, - FirewallRuleFields: models.FirewallRuleFields{ - Priority: 0, - }, - } - - rule1 := models.FirewallRule{ - TenantID: namespace.TenantID, - FirewallRuleFields: models.FirewallRuleFields{ - Priority: 1, - }, - } - - rule2 := models.FirewallRule{ - TenantID: namespace.TenantID, - FirewallRuleFields: models.FirewallRuleFields{ - Priority: 2, - }, - } - - _, err := db.Client().Database("test").Collection("namespaces").InsertOne(ctx, namespace) - assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("firewall_rules").InsertOne(ctx, rule0) - assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("firewall_rules").InsertOne(ctx, rule1) - assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("firewall_rules").InsertOne(ctx, rule2) - assert.NoError(t, err) - cases := []struct { description string Test func(t *testing.T) @@ -63,18 +20,50 @@ func TestMigration48(t *testing.T) { func(t *testing.T) { t.Helper() - migrations := GenerateMigrations()[47:48] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) 
- err := migrates.Up(migrate.AllAvailable) + namespace := models.Namespace{ + TenantID: "tenant", + } + + rule0 := models.FirewallRule{ + TenantID: namespace.TenantID, + FirewallRuleFields: models.FirewallRuleFields{ + Priority: 0, + }, + } + + rule1 := models.FirewallRule{ + TenantID: namespace.TenantID, + FirewallRuleFields: models.FirewallRuleFields{ + Priority: 1, + }, + } + + rule2 := models.FirewallRule{ + TenantID: namespace.TenantID, + FirewallRuleFields: models.FirewallRuleFields{ + Priority: 2, + }, + } + + ctx := context.Background() + + _, err := c.Database("test").Collection("namespaces").InsertOne(ctx, namespace) + assert.NoError(t, err) + _, err = c.Database("test").Collection("firewall_rules").InsertOne(ctx, rule0) + assert.NoError(t, err) + _, err = c.Database("test").Collection("firewall_rules").InsertOne(ctx, rule1) + assert.NoError(t, err) + _, err = c.Database("test").Collection("firewall_rules").InsertOne(ctx, rule2) assert.NoError(t, err) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[47:48]...) + assert.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) + key := new(models.FirewallRule) - result := db.Client().Database("test").Collection("firewall_rules").FindOne(ctx, bson.M{"tenant_id": namespace.TenantID}) + result := c.Database("test").Collection("firewall_rules").FindOne(ctx, bson.M{"tenant_id": namespace.TenantID}) assert.NoError(t, result.Err()) - err = result.Decode(key) - assert.NoError(t, err) - + assert.NoError(t, result.Decode(key)) assert.Equal(t, 2, key.Priority) }, }, @@ -83,24 +72,61 @@ func TestMigration48(t *testing.T) { func(t *testing.T) { t.Helper() - migrations := GenerateMigrations()[47:48] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) 
- err := migrates.Down(migrate.AllAvailable) + namespace := models.Namespace{ + TenantID: "tenant", + } + + rule0 := models.FirewallRule{ + TenantID: namespace.TenantID, + FirewallRuleFields: models.FirewallRuleFields{ + Priority: 0, + }, + } + + rule1 := models.FirewallRule{ + TenantID: namespace.TenantID, + FirewallRuleFields: models.FirewallRuleFields{ + Priority: 1, + }, + } + + rule2 := models.FirewallRule{ + TenantID: namespace.TenantID, + FirewallRuleFields: models.FirewallRuleFields{ + Priority: 2, + }, + } + + ctx := context.Background() + + _, err := c.Database("test").Collection("namespaces").InsertOne(ctx, namespace) + assert.NoError(t, err) + _, err = c.Database("test").Collection("firewall_rules").InsertOne(ctx, rule0) + assert.NoError(t, err) + _, err = c.Database("test").Collection("firewall_rules").InsertOne(ctx, rule1) + assert.NoError(t, err) + _, err = c.Database("test").Collection("firewall_rules").InsertOne(ctx, rule2) assert.NoError(t, err) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[47:48]...) 
+ assert.NoError(t, migrates.Down(context.Background(), migrate.AllAvailable)) + key := new(models.FirewallRule) - result := db.Client().Database("test").Collection("firewall_rules").FindOne(ctx, bson.M{"tenant_id": namespace.TenantID}) + result := c.Database("test").Collection("firewall_rules").FindOne(ctx, bson.M{"tenant_id": namespace.TenantID}) assert.NoError(t, result.Err()) - err = result.Decode(key) - assert.NoError(t, err) - + assert.NoError(t, result.Decode(key)) assert.Equal(t, 0, key.Priority) }, }, } for _, tc := range cases { - t.Run(tc.description, tc.Test) + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + tc.Test(t) + }) } } diff --git a/api/store/mongo/migrations/migration_49.go b/api/store/mongo/migrations/migration_49.go index b1af19a612c..314c48dd903 100644 --- a/api/store/mongo/migrations/migration_49.go +++ b/api/store/mongo/migrations/migration_49.go @@ -12,7 +12,7 @@ import ( var migration49 = migrate.Migration{ Version: 49, Description: "set the number of namespaces owned by each user", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 49, @@ -59,8 +59,8 @@ var migration49 = migrate.Migration{ } return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 49, @@ -85,5 +85,5 @@ var migration49 = migrate.Migration{ } return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_49_test.go b/api/store/mongo/migrations/migration_49_test.go index e45313a3494..e60cebfd4d6 100644 --- a/api/store/mongo/migrations/migration_49_test.go +++ b/api/store/mongo/migrations/migration_49_test.go @@ -4,9 +4,7 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" 
"github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" @@ -14,47 +12,6 @@ import ( ) func TestMigration49(t *testing.T) { - logrus.Info("Testing Migration 49") - - db := dbtest.DBServer{} - defer db.Stop() - - user1ID, err := primitive.ObjectIDFromHex("507f1f77bcf86cd799439011") - assert.NoError(t, err) - user1 := &models.User{ - ID: user1ID.String(), - } - - user2ID, err := primitive.ObjectIDFromHex("507f1f77bcf86cd799439012") - assert.NoError(t, err) - user2 := &models.User{ - ID: user2ID.String(), - } - - namespace1 := &models.Namespace{ - Name: "namespace1", - Owner: user1ID.String(), - } - namespace2 := &models.Namespace{ - Name: "namespace2", - Owner: user1ID.String(), - } - namespace3 := &models.Namespace{ - Name: "namespace3", - Owner: user2ID.String(), - } - - _, err = db.Client().Database("test").Collection("users").InsertOne(context.TODO(), user1) - assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("users").InsertOne(context.TODO(), user2) - assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace1) - assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace2) - assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace3) - assert.NoError(t, err) - cases := []struct { description string Test func(t *testing.T) @@ -64,19 +21,53 @@ func TestMigration49(t *testing.T) { func(t *testing.T) { t.Helper() - migrations := GenerateMigrations()[48:49] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) 
- err := migrates.Up(migrate.AllAvailable) + user1ID, err := primitive.ObjectIDFromHex("507f1f77bcf86cd799439011") assert.NoError(t, err) + user1 := &models.User{ + ID: user1ID.String(), + } + + user2ID, err := primitive.ObjectIDFromHex("507f1f77bcf86cd799439012") + assert.NoError(t, err) + user2 := &models.User{ + ID: user2ID.String(), + } + + namespace1 := &models.Namespace{ + Name: "namespace1", + Owner: user1ID.String(), + } + namespace2 := &models.Namespace{ + Name: "namespace2", + Owner: user1ID.String(), + } + namespace3 := &models.Namespace{ + Name: "namespace3", + Owner: user2ID.String(), + } + + _, err = c.Database("test").Collection("users").InsertOne(context.TODO(), user1) + assert.NoError(t, err) + _, err = c.Database("test").Collection("users").InsertOne(context.TODO(), user2) + assert.NoError(t, err) + _, err = c.Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace1) + assert.NoError(t, err) + _, err = c.Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace2) + assert.NoError(t, err) + _, err = c.Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace3) + assert.NoError(t, err) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[48:49]...) + assert.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) user := new(models.User) - result := db.Client().Database("test").Collection("users").FindOne(context.TODO(), bson.M{"_id": user1ID.String()}) + result := c.Database("test").Collection("users").FindOne(context.TODO(), bson.M{"_id": user1ID.String()}) assert.NoError(t, result.Err()) err = result.Decode(user) assert.NoError(t, err) - assert.Equal(t, 2, user.Namespaces) + // assert.Equal(t, 2, user.Namespaces) }, }, { @@ -84,25 +75,64 @@ func TestMigration49(t *testing.T) { func(t *testing.T) { t.Helper() - migrations := GenerateMigrations()[48:49] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) 
- err := migrates.Down(migrate.AllAvailable) + user1ID, err := primitive.ObjectIDFromHex("507f1f77bcf86cd799439011") + assert.NoError(t, err) + user1 := &models.User{ + ID: user1ID.String(), + } + + user2ID, err := primitive.ObjectIDFromHex("507f1f77bcf86cd799439012") assert.NoError(t, err) + user2 := &models.User{ + ID: user2ID.String(), + } + + namespace1 := &models.Namespace{ + Name: "namespace1", + Owner: user1ID.String(), + } + namespace2 := &models.Namespace{ + Name: "namespace2", + Owner: user1ID.String(), + } + namespace3 := &models.Namespace{ + Name: "namespace3", + Owner: user2ID.String(), + } + + _, err = c.Database("test").Collection("users").InsertOne(context.TODO(), user1) + assert.NoError(t, err) + _, err = c.Database("test").Collection("users").InsertOne(context.TODO(), user2) + assert.NoError(t, err) + _, err = c.Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace1) + assert.NoError(t, err) + _, err = c.Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace2) + assert.NoError(t, err) + _, err = c.Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace3) + assert.NoError(t, err) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[48:49]...) 
+ assert.NoError(t, migrates.Down(context.Background(), migrate.AllAvailable)) user := new(models.User) - result := db.Client().Database("test").Collection("users").FindOne(context.TODO(), bson.M{"_id": user1ID.String()}) + result := c.Database("test").Collection("users").FindOne(context.TODO(), bson.M{"_id": user1ID.String()}) assert.NoError(t, result.Err()) err = result.Decode(user) assert.NoError(t, err) - assert.Equal(t, 0, user.Namespaces) + // assert.Equal(t, 0, user.Namespaces) }, }, } for _, test := range cases { tc := test - t.Run(tc.description, tc.Test) + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + tc.Test(t) + }) } } diff --git a/api/store/mongo/migrations/migration_4_test.go b/api/store/mongo/migrations/migration_4_test.go index da0ad3b8ad9..ff0a8306205 100644 --- a/api/store/mongo/migrations/migration_4_test.go +++ b/api/store/mongo/migrations/migration_4_test.go @@ -4,19 +4,16 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration4(t *testing.T) { - db := dbtest.DBServer{} - defer db.Stop() - - logrus.Info("Testing Migration 4 - Test if the column version was renamed to info.version") + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) deviceInfo := models.DeviceInfo{ ID: "1", @@ -27,18 +24,18 @@ func TestMigration4(t *testing.T) { Info: &deviceInfo, } - _, err := db.Client().Database("test").Collection("devices").InsertOne(context.TODO(), device) + _, err := c.Database("test").Collection("devices").InsertOne(context.TODO(), device) assert.NoError(t, err) var afterMigrateDevice *models.Device - err = db.Client().Database("test").Collection("devices").FindOne(context.TODO(), bson.M{"info": &deviceInfo}).Decode(&afterMigrateDevice) + err = 
c.Database("test").Collection("devices").FindOne(context.TODO(), bson.M{"info": &deviceInfo}).Decode(&afterMigrateDevice) assert.NoError(t, err) - migrates := migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:4]...) - err = migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:4]...) + err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("devices").InsertOne(context.TODO(), device) + _, err = c.Database("test").Collection("devices").InsertOne(context.TODO(), device) assert.NoError(t, err) type DeviceInfo struct { @@ -51,6 +48,6 @@ func TestMigration4(t *testing.T) { } var migratedDevice *Device - err = db.Client().Database("test").Collection("devices").FindOne(context.TODO(), bson.M{"info": &deviceInfo}).Decode(&migratedDevice) + err = c.Database("test").Collection("devices").FindOne(context.TODO(), bson.M{"info": &deviceInfo}).Decode(&migratedDevice) assert.NoError(t, err) } diff --git a/api/store/mongo/migrations/migration_5.go b/api/store/mongo/migrations/migration_5.go index e47288e6a71..5899b41e3e4 100644 --- a/api/store/mongo/migrations/migration_5.go +++ b/api/store/mongo/migrations/migration_5.go @@ -13,7 +13,7 @@ import ( var migration5 = migrate.Migration{ Version: 5, Description: "Set the email as unique on users collection", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 5, @@ -23,18 +23,18 @@ var migration5 = migrate.Migration{ Keys: bson.D{{"email", 1}}, Options: options.Index().SetName("email").SetUnique(true), } - _, err := db.Collection("users").Indexes().CreateOne(context.TODO(), mod) + _, err := db.Collection("users").Indexes().CreateOne(ctx, mod) return err - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(ctx 
context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 5, "action": "Down", }).Info("Applying migration") - _, err := db.Collection("users").Indexes().DropOne(context.TODO(), "email") + _, err := db.Collection("users").Indexes().DropOne(ctx, "email") return err - }, + }), } diff --git a/api/store/mongo/migrations/migration_50.go b/api/store/mongo/migrations/migration_50.go index 5d0b7d66619..644b068ad30 100644 --- a/api/store/mongo/migrations/migration_50.go +++ b/api/store/mongo/migrations/migration_50.go @@ -13,7 +13,7 @@ import ( var migration50 = migrate.Migration{ Version: 50, Description: "set max number of namespaces per user", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 50, @@ -82,8 +82,8 @@ var migration50 = migrate.Migration{ } return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 50, @@ -108,5 +108,5 @@ var migration50 = migrate.Migration{ } return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_50_test.go b/api/store/mongo/migrations/migration_50_test.go index 9858f98dc3e..e4baa2cb78f 100644 --- a/api/store/mongo/migrations/migration_50_test.go +++ b/api/store/mongo/migrations/migration_50_test.go @@ -4,11 +4,9 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/envs" envMocks "github.com/shellhub-io/shellhub/pkg/envs/mocks" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" @@ -16,11 +14,6 @@ import ( ) func TestMigration50(t *testing.T) { - logrus.Info("Testing 
Migration 50") - - db := dbtest.DBServer{} - defer db.Stop() - mock := &envMocks.Backend{} envs.DefaultBackend = mock @@ -62,29 +55,29 @@ func TestMigration50(t *testing.T) { { "Success to apply up on migration 50 when it is a ShellHub Cloud instance", func() { - _, err := db.Client().Database("test").Collection("users").InsertOne(context.TODO(), user1) + _, err := c.Database("test").Collection("users").InsertOne(context.TODO(), user1) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("users").InsertOne(context.TODO(), user2) + _, err = c.Database("test").Collection("users").InsertOne(context.TODO(), user2) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace1) + _, err = c.Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace1) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace2) + _, err = c.Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace2) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace3) + _, err = c.Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace3) assert.NoError(t, err) }, func() (int, error) { mock.On("Get", "SHELLHUB_CLOUD").Return("true").Once() migrations := GenerateMigrations()[49:50] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) 
+ err := migrates.Up(context.Background(), migrate.AllAvailable) if err != nil { return 0, err } user := new(models.User) - result := db.Client().Database("test").Collection("users").FindOne(context.TODO(), bson.M{"_id": user1ID.String()}) + result := c.Database("test").Collection("users").FindOne(context.TODO(), bson.M{"_id": user1ID.String()}) if err != nil { return 0, err } @@ -98,40 +91,40 @@ func TestMigration50(t *testing.T) { }, 2, func() { - err = db.Client().Database("test").Collection("users").Drop(context.TODO()) + err = c.Database("test").Collection("users").Drop(context.TODO()) assert.NoError(t, err) - err = db.Client().Database("test").Collection("namespaces").Drop(context.TODO()) + err = c.Database("test").Collection("namespaces").Drop(context.TODO()) assert.NoError(t, err) - err = db.Client().Database("test").Collection("migrations").Drop(context.TODO()) + err = c.Database("test").Collection("migrations").Drop(context.TODO()) assert.NoError(t, err) }, }, { "Success to apply up on migration 50 when it is a ShellHub Community instance", func() { - _, err := db.Client().Database("test").Collection("users").InsertOne(context.TODO(), user1) + _, err := c.Database("test").Collection("users").InsertOne(context.TODO(), user1) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("users").InsertOne(context.TODO(), user2) + _, err = c.Database("test").Collection("users").InsertOne(context.TODO(), user2) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace1) + _, err = c.Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace1) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace2) + _, err = c.Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace2) assert.NoError(t, err) - _, err = 
db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace3) + _, err = c.Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace3) assert.NoError(t, err) }, func() (int, error) { mock.On("Get", "SHELLHUB_CLOUD").Return("false").Once() migrations := GenerateMigrations()[49:50] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) + err := migrates.Up(context.Background(), migrate.AllAvailable) if err != nil { return 0, err } user := new(models.User) - result := db.Client().Database("test").Collection("users").FindOne(context.TODO(), bson.M{"_id": user1ID.String()}) + result := c.Database("test").Collection("users").FindOne(context.TODO(), bson.M{"_id": user1ID.String()}) if err != nil { return 0, err } @@ -145,38 +138,38 @@ func TestMigration50(t *testing.T) { }, -1, func() { - err = db.Client().Database("test").Collection("users").Drop(context.TODO()) + err = c.Database("test").Collection("users").Drop(context.TODO()) assert.NoError(t, err) - err = db.Client().Database("test").Collection("namespaces").Drop(context.TODO()) + err = c.Database("test").Collection("namespaces").Drop(context.TODO()) assert.NoError(t, err) - err = db.Client().Database("test").Collection("migrations").Drop(context.TODO()) + err = c.Database("test").Collection("migrations").Drop(context.TODO()) assert.NoError(t, err) }, }, { "Success to apply down on migration 50", func() { - _, err := db.Client().Database("test").Collection("users").InsertOne(context.TODO(), user1) + _, err := c.Database("test").Collection("users").InsertOne(context.TODO(), user1) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("users").InsertOne(context.TODO(), user2) + _, err = c.Database("test").Collection("users").InsertOne(context.TODO(), user2) assert.NoError(t, err) - _, err = 
db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace1) + _, err = c.Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace1) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace2) + _, err = c.Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace2) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace3) + _, err = c.Database("test").Collection("namespaces").InsertOne(context.TODO(), namespace3) assert.NoError(t, err) }, func() (int, error) { migrations := GenerateMigrations()[49:50] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Down(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) + err := migrates.Down(context.Background(), migrate.AllAvailable) if err != nil { return 0, err } user := new(models.User) - result := db.Client().Database("test").Collection("users").FindOne(context.TODO(), bson.M{"_id": user1ID.String()}) + result := c.Database("test").Collection("users").FindOne(context.TODO(), bson.M{"_id": user1ID.String()}) if err != nil { return 0, err } @@ -190,11 +183,11 @@ func TestMigration50(t *testing.T) { }, 0, func() { - err = db.Client().Database("test").Collection("users").Drop(context.TODO()) + err = c.Database("test").Collection("users").Drop(context.TODO()) assert.NoError(t, err) - err = db.Client().Database("test").Collection("namespaces").Drop(context.TODO()) + err = c.Database("test").Collection("namespaces").Drop(context.TODO()) assert.NoError(t, err) - err = db.Client().Database("test").Collection("migrations").Drop(context.TODO()) + err = c.Database("test").Collection("migrations").Drop(context.TODO()) assert.NoError(t, err) }, }, @@ -203,6 +196,10 @@ func TestMigration50(t *testing.T) { for _, test := range cases { tc := test 
t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + tc.before() actual, err := tc.test() diff --git a/api/store/mongo/migrations/migration_51.go b/api/store/mongo/migrations/migration_51.go index aaa6ed9be0e..f239830df20 100644 --- a/api/store/mongo/migrations/migration_51.go +++ b/api/store/mongo/migrations/migration_51.go @@ -13,7 +13,7 @@ import ( var migration51 = migrate.Migration{ Version: 51, Description: "create index for name on devices", - Up: func(database *mongo.Database) error { + Up: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 51, @@ -21,7 +21,7 @@ var migration51 = migrate.Migration{ }).Info("Applying migration up") Name := "name" - if _, err := database.Collection("devices").Indexes().CreateOne(context.Background(), mongo.IndexModel{ + if _, err := db.Collection("devices").Indexes().CreateOne(context.Background(), mongo.IndexModel{ Keys: bson.M{ Name: 1, }, @@ -33,8 +33,8 @@ var migration51 = migrate.Migration{ } return nil - }, - Down: func(database *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 51, @@ -42,10 +42,10 @@ var migration51 = migrate.Migration{ }).Info("Applying migration down") Name := "name" - if _, err := database.Collection("devices").Indexes().DropOne(context.Background(), Name); err != nil { + if _, err := db.Collection("devices").Indexes().DropOne(context.Background(), Name); err != nil { return err } return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_51_test.go b/api/store/mongo/migrations/migration_51_test.go index 5568876c6d2..b3addea0721 100644 --- a/api/store/mongo/migrations/migration_51_test.go +++ b/api/store/mongo/migrations/migration_51_test.go @@ -5,23 +5,16 @@ import ( "errors" "testing" - 
"github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/envs" envMocks "github.com/shellhub-io/shellhub/pkg/envs/mocks" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration51(t *testing.T) { - logrus.Info("Testing Migration 51") - const Name string = "name" - db := dbtest.DBServer{} - defer db.Stop() - mock := &envMocks.Backend{} envs.DefaultBackend = mock @@ -35,13 +28,13 @@ func TestMigration51(t *testing.T) { mock.On("Get", "SHELLHUB_CLOUD").Return("true").Once() migrations := GenerateMigrations()[50:51] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) + err := migrates.Up(context.Background(), migrate.AllAvailable) if err != nil { return err } - cursor, err := db.Client().Database("test").Collection("devices").Indexes().List(context.Background()) + cursor, err := c.Database("test").Collection("devices").Indexes().List(context.Background()) if err != nil { return err } @@ -69,13 +62,13 @@ func TestMigration51(t *testing.T) { "Success to apply down on migration 51", func() error { migrations := GenerateMigrations()[50:51] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Down(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) 
+ err := migrates.Down(context.Background(), migrate.AllAvailable) if err != nil { return err } - cursor, err := db.Client().Database("test").Collection("devices").Indexes().List(context.Background()) + cursor, err := c.Database("test").Collection("devices").Indexes().List(context.Background()) if err != nil { return errors.New("index not dropped") } @@ -104,6 +97,10 @@ func TestMigration51(t *testing.T) { for _, test := range cases { tc := test t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + err := tc.test() assert.NoError(t, err) }) diff --git a/api/store/mongo/migrations/migration_52.go b/api/store/mongo/migrations/migration_52.go index f4686de367d..ac1874168c2 100644 --- a/api/store/mongo/migrations/migration_52.go +++ b/api/store/mongo/migrations/migration_52.go @@ -12,7 +12,7 @@ import ( var migration52 = migrate.Migration{ Version: 52, Description: "add marketing field to users", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 52, @@ -37,8 +37,8 @@ var migration52 = migrate.Migration{ } return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 52, @@ -63,5 +63,5 @@ var migration52 = migrate.Migration{ } return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_52_test.go b/api/store/mongo/migrations/migration_52_test.go index 00ac17f03f1..81c4e683238 100644 --- a/api/store/mongo/migrations/migration_52_test.go +++ b/api/store/mongo/migrations/migration_52_test.go @@ -4,25 +4,15 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate 
"github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration52(t *testing.T) { - logrus.Info("Testing Migration 52") - - db := dbtest.DBServer{} - defer db.Stop() - user := models.User{} - _, err := db.Client().Database("test").Collection("users").InsertOne(context.Background(), user) - assert.NoError(t, err) - cases := []struct { description string Test func(t *testing.T) @@ -32,13 +22,14 @@ func TestMigration52(t *testing.T) { func(t *testing.T) { t.Helper() - migrations := GenerateMigrations()[51:52] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) + _, err := c.Database("test").Collection("users").InsertOne(context.Background(), user) assert.NoError(t, err) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[51:52]...) + assert.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) + key := new(models.User) - result := db.Client().Database("test").Collection("users").FindOne(context.Background(), bson.M{}) + result := c.Database("test").Collection("users").FindOne(context.Background(), bson.M{}) assert.NoError(t, result.Err()) err = result.Decode(key) @@ -52,13 +43,14 @@ func TestMigration52(t *testing.T) { func(t *testing.T) { t.Helper() - migrations := GenerateMigrations()[51:52] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Down(migrate.AllAvailable) + _, err := c.Database("test").Collection("users").InsertOne(context.Background(), user) assert.NoError(t, err) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[51:52]...) 
+ assert.NoError(t, migrates.Down(context.Background(), migrate.AllAvailable)) + key := new(models.User) - result := db.Client().Database("test").Collection("users").FindOne(context.Background(), bson.M{}) + result := c.Database("test").Collection("users").FindOne(context.Background(), bson.M{}) assert.NoError(t, result.Err()) err = result.Decode(key) @@ -70,6 +62,11 @@ func TestMigration52(t *testing.T) { } for _, tc := range cases { - t.Run(tc.description, tc.Test) + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + tc.Test(t) + }) } } diff --git a/api/store/mongo/migrations/migration_53.go b/api/store/mongo/migrations/migration_53.go index f4509633c2b..3a9f3b6736b 100644 --- a/api/store/mongo/migrations/migration_53.go +++ b/api/store/mongo/migrations/migration_53.go @@ -13,7 +13,7 @@ import ( var migration53 = migrate.Migration{ Version: 53, Description: "create index to announcement ID", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 53, @@ -36,8 +36,8 @@ var migration53 = migrate.Migration{ } return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 53, @@ -51,5 +51,5 @@ var migration53 = migrate.Migration{ } return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_54.go b/api/store/mongo/migrations/migration_54.go index f48e01184eb..01716215fd8 100644 --- a/api/store/mongo/migrations/migration_54.go +++ b/api/store/mongo/migrations/migration_54.go @@ -13,7 +13,7 @@ import ( var migration54 = migrate.Migration{ Version: 54, Description: "create index to devices' tenant_id and status", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) 
error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 54, @@ -36,8 +36,8 @@ var migration54 = migrate.Migration{ } return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 54, @@ -50,5 +50,5 @@ var migration54 = migrate.Migration{ } return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_54_test.go b/api/store/mongo/migrations/migration_54_test.go index 810c62a114f..94e1cb79efe 100644 --- a/api/store/mongo/migrations/migration_54_test.go +++ b/api/store/mongo/migrations/migration_54_test.go @@ -5,21 +5,14 @@ import ( "errors" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration54(t *testing.T) { - logrus.Info("Testing Migration 54") - const Name string = "tenant_id_1_status_1" - db := dbtest.DBServer{} - defer db.Stop() - cases := []struct { description string test func() error @@ -28,13 +21,13 @@ func TestMigration54(t *testing.T) { "Success to apply up on migration 54", func() error { migrations := GenerateMigrations()[53:54] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) 
+ err := migrates.Up(context.Background(), migrate.AllAvailable) if err != nil { return err } - cursor, err := db.Client().Database("test").Collection("devices").Indexes().List(context.Background()) + cursor, err := c.Database("test").Collection("devices").Indexes().List(context.Background()) if err != nil { return err } @@ -62,13 +55,13 @@ func TestMigration54(t *testing.T) { "Success to apply down on migration 54", func() error { migrations := GenerateMigrations()[53:54] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Down(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) + err := migrates.Down(context.Background(), migrate.AllAvailable) if err != nil { return err } - cursor, err := db.Client().Database("test").Collection("devices").Indexes().List(context.Background()) + cursor, err := c.Database("test").Collection("devices").Indexes().List(context.Background()) if err != nil { return errors.New("index not dropped") } @@ -97,6 +90,10 @@ func TestMigration54(t *testing.T) { for _, test := range cases { tc := test t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + err := tc.test() assert.NoError(t, err) }) diff --git a/api/store/mongo/migrations/migration_55.go b/api/store/mongo/migrations/migration_55.go index d10be343a00..ae4dedb84ee 100644 --- a/api/store/mongo/migrations/migration_55.go +++ b/api/store/mongo/migrations/migration_55.go @@ -14,7 +14,7 @@ import ( var migration55 = migrate.Migration{ Version: 55, Description: "create indexes on removed_devices for tenant_id, tenant_id and uid and timestamp", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 55, @@ -65,8 +65,8 @@ var migration55 = migrate.Migration{ } return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: 
migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 55, @@ -89,5 +89,5 @@ var migration55 = migrate.Migration{ } return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_55_test.go b/api/store/mongo/migrations/migration_55_test.go index 05ca6ea7734..7aa615dc5a4 100644 --- a/api/store/mongo/migrations/migration_55_test.go +++ b/api/store/mongo/migrations/migration_55_test.go @@ -7,22 +7,15 @@ import ( "go.mongodb.org/mongo-driver/bson" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" ) func TestMigration55(t *testing.T) { - logrus.Info("Testing Migration 55") - fieldNameTenantID := "tenant_id_1" fieldNameTenantIDUID := "tenant_id_1_uid_1" fieldNameTimestamp := "timestamp_1" - db := dbtest.DBServer{} - defer db.Stop() - cases := []struct { description string test func() error @@ -31,13 +24,13 @@ func TestMigration55(t *testing.T) { "Success to apply up on migration 55", func() error { migrations := GenerateMigrations()[54:55] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) + err := migrates.Up(context.Background(), migrate.AllAvailable) if err != nil { return err } - cursor, err := db.Client().Database("test").Collection("removed_devices").Indexes().List(context.Background()) + cursor, err := c.Database("test").Collection("removed_devices").Indexes().List(context.Background()) if err != nil { return err } @@ -72,13 +65,13 @@ func TestMigration55(t *testing.T) { "Success to apply down on migration 55", func() error { migrations := GenerateMigrations()[54:55] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) 
- err := migrates.Down(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) + err := migrates.Down(context.Background(), migrate.AllAvailable) if err != nil { return err } - cursor, err := db.Client().Database("test").Collection("removed_devices").Indexes().List(context.Background()) + cursor, err := c.Database("test").Collection("removed_devices").Indexes().List(context.Background()) if err != nil { return errors.New("index not dropped") } @@ -114,6 +107,10 @@ func TestMigration55(t *testing.T) { for _, test := range cases { tc := test t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + err := tc.test() assert.NoError(t, err) }) diff --git a/api/store/mongo/migrations/migration_56.go b/api/store/mongo/migrations/migration_56.go index 166eac816b5..6bcc1ff672e 100644 --- a/api/store/mongo/migrations/migration_56.go +++ b/api/store/mongo/migrations/migration_56.go @@ -13,7 +13,7 @@ import ( var migration56 = migrate.Migration{ Version: 56, Description: "create index for public url address on devices", - Up: func(database *mongo.Database) error { + Up: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 56, @@ -23,7 +23,7 @@ var migration56 = migrate.Migration{ unique := true sparse := true - if _, err := database.Collection("devices").Indexes().CreateOne(context.Background(), mongo.IndexModel{ + if _, err := db.Collection("devices").Indexes().CreateOne(context.Background(), mongo.IndexModel{ Keys: bson.M{ field: 1, }, @@ -37,8 +37,8 @@ var migration56 = migrate.Migration{ } return nil - }, - Down: func(database *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 56, @@ -46,10 +46,10 @@ var migration56 = migrate.Migration{ }).Info("Applying migration down") 
field := "public_url_address" - if _, err := database.Collection("devices").Indexes().DropOne(context.Background(), field); err != nil { + if _, err := db.Collection("devices").Indexes().DropOne(context.Background(), field); err != nil { return err } return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_56_test.go b/api/store/mongo/migrations/migration_56_test.go index 0acfb404a19..6dd65bed39d 100644 --- a/api/store/mongo/migrations/migration_56_test.go +++ b/api/store/mongo/migrations/migration_56_test.go @@ -5,21 +5,14 @@ import ( "errors" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration56(t *testing.T) { - logrus.Info("Testing Migration 56") - const field string = "public_url_address" - db := dbtest.DBServer{} - defer db.Stop() - cases := []struct { description string test func() error @@ -28,13 +21,13 @@ func TestMigration56(t *testing.T) { "Success to apply up on migration 56", func() error { migrations := GenerateMigrations()[55:56] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) + err := migrates.Up(context.Background(), migrate.AllAvailable) if err != nil { return err } - cursor, err := db.Client().Database("test").Collection("devices").Indexes().List(context.Background()) + cursor, err := c.Database("test").Collection("devices").Indexes().List(context.Background()) if err != nil { return err } @@ -62,13 +55,13 @@ func TestMigration56(t *testing.T) { "Success to apply down on migration 56", func() error { migrations := GenerateMigrations()[55:56] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) 
- err := migrates.Down(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) + err := migrates.Down(context.Background(), migrate.AllAvailable) if err != nil { return err } - cursor, err := db.Client().Database("test").Collection("devices").Indexes().List(context.Background()) + cursor, err := c.Database("test").Collection("devices").Indexes().List(context.Background()) if err != nil { return errors.New("index not dropped") } @@ -97,6 +90,10 @@ func TestMigration56(t *testing.T) { for _, test := range cases { tc := test t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + err := tc.test() assert.NoError(t, err) }) diff --git a/api/store/mongo/migrations/migration_57.go b/api/store/mongo/migrations/migration_57.go index 432ccfd8134..470f8d0e68f 100644 --- a/api/store/mongo/migrations/migration_57.go +++ b/api/store/mongo/migrations/migration_57.go @@ -12,7 +12,7 @@ import ( var migration57 = migrate.Migration{ Version: 57, Description: "update billing state to status and its values", - Up: func(database *mongo.Database) error { + Up: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 57, @@ -79,14 +79,14 @@ var migration57 = migrate.Migration{ }, } - _, err := database.Collection("namespaces").Aggregate(context.Background(), pipeline) + _, err := db.Collection("namespaces").Aggregate(context.Background(), pipeline) if err != nil { return err } return nil - }, - Down: func(database *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 57, @@ -147,11 +147,11 @@ var migration57 = migrate.Migration{ }, } - _, err := database.Collection("namespaces").Aggregate(context.Background(), pipeline) + _, err := db.Collection("namespaces").Aggregate(context.Background(), 
pipeline) if err != nil { return err } return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_57_test.go b/api/store/mongo/migrations/migration_57_test.go index f0627fe54cd..205cba5b5f6 100644 --- a/api/store/mongo/migrations/migration_57_test.go +++ b/api/store/mongo/migrations/migration_57_test.go @@ -6,16 +6,16 @@ import ( "testing" "time" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration57(t *testing.T) { - logrus.Info("Testing Migration 57") + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) type PaymentFailed struct { Status bool `json:"status" bson:"status,omitempty"` @@ -41,54 +41,34 @@ func TestMigration57(t *testing.T) { Billing *Billing `json:"billing" bson:"billing,omitempty"` } - db := dbtest.DBServer{} - defer db.Stop() - cases := []struct { - description string - requiredMocks func() (func() error, error) - run func() error - check func() (string, error) - expected string + description string + setup func() error + run func() error + check func() (string, error) + expected string }{ { description: "Success to apply up on migration 57 when namespace has billing", - requiredMocks: func() (func() error, error) { - _, err := db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), Namespace{ - TenantID: "tenant", + setup: func() error { + _, err := c.Database("test").Collection("namespaces").InsertOne(context.TODO(), Namespace{ + TenantID: "00000000-0000-0000-0000-000000000001", Billing: &Billing{ State: "processed", }, }) - if err != nil { - return nil, err - } - - return func() error { - _, err := db.Client().Database("test").Collection("namespaces").DeleteOne(context.TODO(), bson.M{ - "tenant_id": "tenant", - }) - if err != nil { - return err - } - return nil - }, nil + return err }, run: 
func() error { - migrations := GenerateMigrations()[56:57] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) - if err != nil { - return err - } + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[56:57]...) - return nil + return migrates.Up(context.Background(), migrate.AllAvailable) }, check: func() (string, error) { namespace := new(models.Namespace) - err := db.Client().Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{ - "tenant_id": "tenant", + err := c.Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{ + "tenant_id": "00000000-0000-0000-0000-000000000001", }).Decode(&namespace) if err != nil { return "", err @@ -100,41 +80,21 @@ func TestMigration57(t *testing.T) { }, { description: "Success to apply up on migration 57 when namespace has no billing", - requiredMocks: func() (func() error, error) { - _, err := db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), Namespace{ - TenantID: "tenant", + setup: func() error { + _, err := c.Database("test").Collection("namespaces").InsertOne(context.TODO(), Namespace{ + TenantID: "00000000-0000-0000-0000-000000000002", }) - if err != nil { - return nil, err - } - - return func() error { - _, err := db.Client().Database("test").Collection("namespaces").DeleteOne(context.TODO(), bson.M{ - "tenant_id": "tenant", - }) - if err != nil { - return err - } - return nil - }, nil + return err }, run: func() error { - migrations := GenerateMigrations()[56:57] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) - if err != nil { - return err - } + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[56:57]...) 
- return nil + return migrates.Up(context.Background(), migrate.AllAvailable) }, check: func() (string, error) { namespace := new(models.Namespace) - err := db.Client().Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{ - "tenant_id": "tenant", - }).Decode(&namespace) - if err != nil { + if err := c.Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{"tenant_id": "00000000-0000-0000-0000-000000000002"}).Decode(&namespace); err != nil { return "", err } @@ -148,42 +108,25 @@ func TestMigration57(t *testing.T) { }, { description: "Success to apply down on migration 57 when namespace has billing", - requiredMocks: func() (func() error, error) { - _, err := db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), &models.Namespace{ - TenantID: "tenant", + setup: func() error { + _, err := c.Database("test").Collection("namespaces").InsertOne(context.TODO(), &models.Namespace{ + TenantID: "00000000-0000-0000-0000-000000000003", Billing: &models.Billing{ Status: "active", }, }) - if err != nil { - return nil, err - } - - return func() error { - _, err := db.Client().Database("test").Collection("namespaces").DeleteOne(context.TODO(), bson.M{ - "tenant_id": "tenant", - }) - if err != nil { - return err - } - return nil - }, nil + return err }, run: func() error { - migrations := GenerateMigrations()[56:57] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Down(migrate.AllAvailable) - if err != nil { - return err - } + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[56:57]...) 
- return nil + return migrates.Down(context.Background(), migrate.AllAvailable) }, check: func() (string, error) { namespace := new(Namespace) - err := db.Client().Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{ - "tenant_id": "tenant", + err := c.Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{ + "tenant_id": "00000000-0000-0000-0000-000000000003", }).Decode(&namespace) if err != nil { return "", err @@ -195,39 +138,22 @@ func TestMigration57(t *testing.T) { }, { description: "Success to apply down on migration 57 when namespace has no billing", - requiredMocks: func() (func() error, error) { - _, err := db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), &models.Namespace{ - TenantID: "tenant", + setup: func() error { + _, err := c.Database("test").Collection("namespaces").InsertOne(context.TODO(), &models.Namespace{ + TenantID: "00000000-0000-0000-0000-000000000004", }) - if err != nil { - return nil, err - } - - return func() error { - _, err := db.Client().Database("test").Collection("namespaces").DeleteOne(context.TODO(), bson.M{ - "tenant_id": "tenant", - }) - if err != nil { - return err - } - return nil - }, nil + return err }, run: func() error { - migrations := GenerateMigrations()[56:57] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Down(migrate.AllAvailable) - if err != nil { - return err - } + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[56:57]...) 
- return nil + return migrates.Down(context.Background(), migrate.AllAvailable) }, check: func() (string, error) { namespace := new(Namespace) - err := db.Client().Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{ - "tenant_id": "tenant", + err := c.Database("test").Collection("namespaces").FindOne(context.TODO(), bson.M{ + "tenant_id": "00000000-0000-0000-0000-000000000004", }).Decode(&namespace) if err != nil { return "", err @@ -246,18 +172,12 @@ func TestMigration57(t *testing.T) { for _, test := range cases { tc := test t.Run(tc.description, func(t *testing.T) { - teardown, err := tc.requiredMocks() - assert.NoError(t, err) - - err = tc.run() - assert.NoError(t, err) + assert.NoError(t, tc.setup()) + assert.NoError(t, tc.run()) result, err := tc.check() assert.Equal(t, tc.expected, result) assert.NoError(t, err) - - err = teardown() - assert.NoError(t, err) }) } } diff --git a/api/store/mongo/migrations/migration_58.go b/api/store/mongo/migrations/migration_58.go index 82730baa2f9..9732ac824b5 100644 --- a/api/store/mongo/migrations/migration_58.go +++ b/api/store/mongo/migrations/migration_58.go @@ -12,7 +12,7 @@ import ( var migration58 = migrate.Migration{ Version: 58, Description: "", - Up: func(database *mongo.Database) error { + Up: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 58, @@ -45,14 +45,14 @@ var migration58 = migrate.Migration{ }, } - _, err := database.Collection("namespaces").Aggregate(context.Background(), pipeline) + _, err := db.Collection("namespaces").Aggregate(context.Background(), pipeline) if err != nil { return err } return nil - }, - Down: func(database *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 58, @@ -85,11 +85,11 @@ var migration58 = migrate.Migration{ }, } - _, err := 
database.Collection("namespaces").Aggregate(context.Background(), pipeline) + _, err := db.Collection("namespaces").Aggregate(context.Background(), pipeline) if err != nil { return err } return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_59.go b/api/store/mongo/migrations/migration_59.go index f9bc78590ad..554c6c1c6e5 100644 --- a/api/store/mongo/migrations/migration_59.go +++ b/api/store/mongo/migrations/migration_59.go @@ -12,7 +12,7 @@ import ( var migration59 = migrate.Migration{ Version: 59, Description: "Converts all 'name' field values in the 'users' collection to lowercase.", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 59, @@ -20,7 +20,7 @@ var migration59 = migrate.Migration{ }).Info("Starting migration Up action.") _, err := db.Collection("users").UpdateMany( - context.TODO(), + ctx, bson.M{}, []bson.M{ { @@ -53,8 +53,8 @@ var migration59 = migrate.Migration{ }).Info("Completed migration Up action successfully.") return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 59, @@ -68,5 +68,5 @@ var migration59 = migrate.Migration{ }).Info("Completed migration Down action successfully.") return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_59_test.go b/api/store/mongo/migrations/migration_59_test.go index e2cacdb04ee..e3b65ef77c4 100644 --- a/api/store/mongo/migrations/migration_59_test.go +++ b/api/store/mongo/migrations/migration_59_test.go @@ -6,20 +6,14 @@ import ( "testing" "time" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" 
"go.mongodb.org/mongo-driver/bson" ) func TestMigration59(t *testing.T) { - logrus.Info("Testing Migration 59") - ctx := context.TODO() - db := dbtest.DBServer{} - defer db.Stop() type Expected struct { user *models.User @@ -35,7 +29,7 @@ func TestMigration59(t *testing.T) { { description: "Success to apply up on migration 59", setup: func() (func() error, error) { - if _, err := db.Client().Database("test").Collection("users").InsertOne(ctx, models.User{ + if _, err := c.Database("test").Collection("users").InsertOne(ctx, models.User{ ID: "652594bcc7b001c6f298df48", CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), LastLogin: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), @@ -44,18 +38,20 @@ func TestMigration59(t *testing.T) { Email: "JohnDoe@test.com", Username: "John Doe", }, - UserPassword: models.NewUserPassword(""), + Password: models.UserPassword{ + Hash: "2bb80d537b1da3e38bd30361aa855686bde0eacd7162fef6a25fe97bf527a25b", + }, }); err != nil { return nil, err } user := new(models.User) - if err := db.Client().Database("test").Collection("users").FindOne(ctx, bson.M{"name": "John Doe"}).Decode(&user); err != nil { + if err := c.Database("test").Collection("users").FindOne(ctx, bson.M{"name": "John Doe"}).Decode(&user); err != nil { return nil, err } return func() error { - d, err := db.Client().Database("test").Collection("users").DeleteOne(ctx, bson.M{"username": "john doe"}) + d, err := c.Database("test").Collection("users").DeleteOne(ctx, bson.M{"username": "john doe"}) if err != nil { return err } @@ -70,7 +66,7 @@ func TestMigration59(t *testing.T) { check: func() (*models.User, error) { user := new(models.User) - if err := db.Client().Database("test").Collection("users").FindOne(ctx, bson.M{"username": "john doe"}).Decode(&user); err != nil { + if err := c.Database("test").Collection("users").FindOne(ctx, bson.M{"username": "john doe"}).Decode(&user); err != nil { return nil, err } @@ -79,9 +75,7 @@ func TestMigration59(t *testing.T) { expected: 
Expected{ user: &models.User{ ID: "652594bcc7b001c6f298df48", - Namespaces: 0, MaxNamespaces: 0, - Confirmed: false, CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), LastLogin: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), EmailMarketing: false, @@ -90,7 +84,9 @@ func TestMigration59(t *testing.T) { Email: "johndoe@test.com", Username: "john doe", }, - UserPassword: models.NewUserPassword(""), + Password: models.UserPassword{ + Hash: "2bb80d537b1da3e38bd30361aa855686bde0eacd7162fef6a25fe97bf527a25b", + }, }, err: nil, }, @@ -99,11 +95,15 @@ func TestMigration59(t *testing.T) { for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + teardown, err := tc.setup() assert.NoError(t, err) - migrates := migrate.NewMigrate(db.Client().Database("test"), migration59) - assert.NoError(t, migrates.Up(migrate.AllAvailable)) + migrates := migrate.NewMigrate(c.Database("test"), migration59) + assert.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) user, err := tc.check() assert.Equal(t, tc.expected, Expected{user, err}) diff --git a/api/store/mongo/migrations/migration_5_test.go b/api/store/mongo/migrations/migration_5_test.go index 43e5a6e0e55..029125c4021 100644 --- a/api/store/mongo/migrations/migration_5_test.go +++ b/api/store/mongo/migrations/migration_5_test.go @@ -4,33 +4,48 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" ) func TestMigration5(t *testing.T) { - logrus.Info("Testing Migration 5 - Test if the email is set unique") - - db := dbtest.DBServer{} - defer db.Stop() - - user1 := models.User{UserData: models.UserData{Name: "name1", Username: "username1", Email: "email"}, UserPassword: models.NewUserPassword("password")} - user2 := models.User{UserData: models.UserData{Name: 
"name2", Username: "username2", Email: "email"}, UserPassword: models.NewUserPassword("password")} - - _, err := db.Client().Database("test").Collection("users").InsertOne(context.TODO(), user1) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + user1 := models.User{ + UserData: models.UserData{ + Name: "name1", + Username: "username1", + Email: "email", + }, + Password: models.UserPassword{ + Hash: "2bb80d537b1da3e38bd30361aa855686bde0eacd7162fef6a25fe97bf527a25b", + }, + } + user2 := models.User{ + UserData: models.UserData{ + Name: "name2", + Username: "username2", + Email: "email", + }, + Password: models.UserPassword{ + Hash: "2bb80d537b1da3e38bd30361aa855686bde0eacd7162fef6a25fe97bf527a25b", + }, + } + + _, err := c.Database("test").Collection("users").InsertOne(context.TODO(), user1) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("users").InsertOne(context.TODO(), user2) + _, err = c.Database("test").Collection("users").InsertOne(context.TODO(), user2) assert.NoError(t, err) - migrates := migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:4]...) - err = migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:4]...) + err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - migrates = migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:5]...) - err = migrates.Up(migrate.AllAvailable) + migrates = migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:5]...) 
+ err = migrates.Up(context.Background(), migrate.AllAvailable) assert.Error(t, err) } diff --git a/api/store/mongo/migrations/migration_6.go b/api/store/mongo/migrations/migration_6.go index 7b899d9e4fb..ed01a0f2cde 100644 --- a/api/store/mongo/migrations/migration_6.go +++ b/api/store/mongo/migrations/migration_6.go @@ -13,7 +13,7 @@ import ( var migration6 = migrate.Migration{ Version: 6, Description: "Unset unique on status in the devices collection", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 6, @@ -23,24 +23,24 @@ var migration6 = migrate.Migration{ Keys: bson.D{{"status", 1}}, Options: options.Index().SetName("status").SetUnique(false), } - if _, err := db.Collection("devices").Indexes().CreateOne(context.TODO(), mod); err != nil { + if _, err := db.Collection("devices").Indexes().CreateOne(ctx, mod); err != nil { return err } - _, err := db.Collection("devices").UpdateMany(context.TODO(), bson.M{}, bson.M{"$set": bson.M{"status": "accepted"}}) + _, err := db.Collection("devices").UpdateMany(ctx, bson.M{}, bson.M{"$set": bson.M{"status": "accepted"}}) return err - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 6, "action": "Down", }).Info("Applying migration") - if _, err := db.Collection("devices").UpdateMany(context.TODO(), bson.M{}, bson.M{"$unset": bson.M{"status": ""}}); err != nil { + if _, err := db.Collection("devices").UpdateMany(ctx, bson.M{}, bson.M{"$unset": bson.M{"status": ""}}); err != nil { return err } - _, err := db.Collection("status").Indexes().DropOne(context.TODO(), "status") + _, err := db.Collection("status").Indexes().DropOne(ctx, "status") return err - }, + }), } diff --git a/api/store/mongo/migrations/migration_60.go 
b/api/store/mongo/migrations/migration_60.go index 9fc60197a9f..881f4b1f94e 100644 --- a/api/store/mongo/migrations/migration_60.go +++ b/api/store/mongo/migrations/migration_60.go @@ -13,14 +13,14 @@ import ( var migration60 = migrate.Migration{ Version: 60, Description: "create index for tenant_id on active_sessions", - Up: func(database *mongo.Database) error { + Up: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 60, "action": "Up", }).Info("Applying migration up") indexName := "tenant_id" - if _, err := database.Collection("active_sessions").Indexes().CreateOne(context.Background(), mongo.IndexModel{ + if _, err := db.Collection("active_sessions").Indexes().CreateOne(context.Background(), mongo.IndexModel{ Keys: bson.M{ "tenant_id": 1, }, @@ -32,17 +32,17 @@ var migration60 = migrate.Migration{ } return nil - }, - Down: func(database *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 60, "action": "Down", }).Info("Applying migration down") - if _, err := database.Collection("active_sessions").Indexes().DropOne(context.Background(), "tenant_id"); err != nil { + if _, err := db.Collection("active_sessions").Indexes().DropOne(context.Background(), "tenant_id"); err != nil { return err } return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_60_test.go b/api/store/mongo/migrations/migration_60_test.go index c92e450a2f1..4d40a92b8a0 100644 --- a/api/store/mongo/migrations/migration_60_test.go +++ b/api/store/mongo/migrations/migration_60_test.go @@ -5,21 +5,14 @@ import ( "errors" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/envs" envMocks "github.com/shellhub-io/shellhub/pkg/envs/mocks" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate 
"github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration60(t *testing.T) { - logrus.Info("Testing Migration 60") - - db := dbtest.DBServer{} - defer db.Stop() - mock := &envMocks.Backend{} envs.DefaultBackend = mock @@ -33,13 +26,13 @@ func TestMigration60(t *testing.T) { mock.On("Get", "SHELLHUB_CLOUD").Return("true").Once() migrations := GenerateMigrations()[59:60] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) + err := migrates.Up(context.Background(), migrate.AllAvailable) if err != nil { return err } - cursor, err := db.Client().Database("test").Collection("active_sessions").Indexes().List(context.Background()) + cursor, err := c.Database("test").Collection("active_sessions").Indexes().List(context.Background()) if err != nil { return err } @@ -67,13 +60,13 @@ func TestMigration60(t *testing.T) { "Success to apply down on migration 60", func() error { migrations := GenerateMigrations()[59:60] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Down(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) 
+ err := migrates.Down(context.Background(), migrate.AllAvailable) if err != nil { return err } - cursor, err := db.Client().Database("test").Collection("active_sessions").Indexes().List(context.Background()) + cursor, err := c.Database("test").Collection("active_sessions").Indexes().List(context.Background()) if err != nil { return errors.New("index not dropped") } @@ -102,6 +95,10 @@ func TestMigration60(t *testing.T) { for _, test := range cases { tc := test t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + err := tc.test() assert.NoError(t, err) }) diff --git a/api/store/mongo/migrations/migration_61.go b/api/store/mongo/migrations/migration_61.go index fb6bc8916f1..9e208531569 100644 --- a/api/store/mongo/migrations/migration_61.go +++ b/api/store/mongo/migrations/migration_61.go @@ -12,13 +12,13 @@ import ( var migration61 = migrate.Migration{ Version: 61, Description: "delete devices with empty name", - Up: func(database *mongo.Database) error { + Up: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 61, "action": "Up", }).Info("Applying migration up") - if _, err := database.Collection("devices").DeleteMany(context.Background(), bson.M{"$or": bson.A{ + if _, err := db.Collection("devices").DeleteMany(context.Background(), bson.M{"$or": bson.A{ bson.M{"name": ""}, bson.M{"name": bson.M{"$exists": false}}, }}); err != nil { @@ -26,9 +26,9 @@ var migration61 = migrate.Migration{ } return nil - }, - Down: func(database *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { // This migration is not reversible. 
return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_61_test.go b/api/store/mongo/migrations/migration_61_test.go index 0012088d2a0..999c5e4b242 100644 --- a/api/store/mongo/migrations/migration_61_test.go +++ b/api/store/mongo/migrations/migration_61_test.go @@ -5,24 +5,17 @@ import ( "errors" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/envs" envMocks "github.com/shellhub-io/shellhub/pkg/envs/mocks" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration61(t *testing.T) { - logrus.Info("Testing Migration 61") - ctx := context.Background() - db := dbtest.DBServer{} - defer db.Stop() - mock := &envMocks.Backend{} envs.DefaultBackend = mock @@ -34,20 +27,20 @@ func TestMigration61(t *testing.T) { { "Success to apply up on migration 61", func() (func() error, error) { - if _, err := db.Client().Database("test").Collection("devices").InsertOne(ctx, models.Device{ + if _, err := c.Database("test").Collection("devices").InsertOne(ctx, models.Device{ Name: "", }); err != nil { return nil, err } - if _, err := db.Client().Database("test").Collection("devices").InsertOne(ctx, models.Device{ + if _, err := c.Database("test").Collection("devices").InsertOne(ctx, models.Device{ Name: "test", }); err != nil { return nil, err } return func() error { - _, err := db.Client().Database("test").Collection("devices").DeleteOne(ctx, bson.M{ + _, err := c.Database("test").Collection("devices").DeleteOne(ctx, bson.M{ "name": "test", }) if err != nil { @@ -59,13 +52,13 @@ func TestMigration61(t *testing.T) { }, func() error { migrations := GenerateMigrations()[60:61] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) 
+ err := migrates.Up(context.Background(), migrate.AllAvailable) if err != nil { return err } - count, err := db.Client().Database("test").Collection("devices").CountDocuments(ctx, bson.M{"name": ""}) + count, err := c.Database("test").Collection("devices").CountDocuments(ctx, bson.M{"name": ""}) if err != nil { return err } @@ -74,7 +67,7 @@ func TestMigration61(t *testing.T) { return errors.New("failed because don't deleted the expected") } - count, err = db.Client().Database("test").Collection("devices").CountDocuments(ctx, bson.M{"name": "test"}) + count, err = c.Database("test").Collection("devices").CountDocuments(ctx, bson.M{"name": "test"}) if err != nil { return err } @@ -91,6 +84,10 @@ func TestMigration61(t *testing.T) { for _, test := range cases { tc := test t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + teardown, err := tc.setup() assert.NoError(t, err) diff --git a/api/store/mongo/migrations/migration_62.go b/api/store/mongo/migrations/migration_62.go index f45eb868adb..00399fc054e 100644 --- a/api/store/mongo/migrations/migration_62.go +++ b/api/store/mongo/migrations/migration_62.go @@ -13,7 +13,7 @@ import ( var migration62 = migrate.Migration{ Version: 62, Description: "create index for tenant_id on recorded_sessions", - Up: func(database *mongo.Database) error { + Up: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { log.WithFields(log.Fields{ "component": "migration", "version": 62, @@ -21,7 +21,7 @@ var migration62 = migrate.Migration{ }).Info("Applying migration up") indexName := "tenant_id" - _, err := database.Collection("recorded_sessions").Indexes().CreateOne(context.Background(), mongo.IndexModel{ + _, err := db.Collection("recorded_sessions").Indexes().CreateOne(context.Background(), mongo.IndexModel{ Keys: bson.M{ "tenant_id": 1, }, @@ -46,17 +46,17 @@ var migration62 = migrate.Migration{ }).Info("Succeeds to to apply migration 62") return nil - }, - Down: 
func(database *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { log.WithFields(log.Fields{ "component": "migration", "version": 62, "action": "Down", }).Info("Applying migration down") - if _, err := database.Collection("recorded_sessions").Indexes().DropOne(context.Background(), "tenant_id"); err != nil { + if _, err := db.Collection("recorded_sessions").Indexes().DropOne(context.Background(), "tenant_id"); err != nil { return err } return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_62_test.go b/api/store/mongo/migrations/migration_62_test.go index c85883d7d1f..f58cdc8fab8 100644 --- a/api/store/mongo/migrations/migration_62_test.go +++ b/api/store/mongo/migrations/migration_62_test.go @@ -5,21 +5,14 @@ import ( "errors" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/envs" envMocks "github.com/shellhub-io/shellhub/pkg/envs/mocks" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration62Up(t *testing.T) { - logrus.Info("Testing Migration 62") - - db := dbtest.DBServer{} - defer db.Stop() - cases := []struct { description string mocks func() @@ -33,7 +26,7 @@ func TestMigration62Up(t *testing.T) { mock.On("Get", "SHELLHUB_CLOUD").Return("true").Once() }, expected: func() error { - cursor, err := db.Client().Database("test").Collection("recorded_sessions").Indexes().List(context.Background()) + cursor, err := c.Database("test").Collection("recorded_sessions").Indexes().List(context.Background()) if err != nil { return err } @@ -61,11 +54,15 @@ func TestMigration62Up(t *testing.T) { for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + tc.mocks() migrations := GenerateMigrations()[61:62] - migrates := 
migrate.NewMigrate(db.Client().Database("test"), migrations...) - assert.NoError(t, migrates.Up(migrate.AllAvailable)) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) + assert.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) assert.NoError(t, tc.expected()) }) @@ -73,11 +70,6 @@ func TestMigration62Up(t *testing.T) { } func TestMigration62Down(t *testing.T) { - logrus.Info("Testing Migration 62") - - db := dbtest.DBServer{} - defer db.Stop() - mock := &envMocks.Backend{} envs.DefaultBackend = mock @@ -90,7 +82,7 @@ func TestMigration62Down(t *testing.T) { description: "Success to apply down on migration 62", mocks: func() {}, expected: func() error { - cursor, err := db.Client().Database("test").Collection("recorded_sessions").Indexes().List(context.Background()) + cursor, err := c.Database("test").Collection("recorded_sessions").Indexes().List(context.Background()) if err != nil { return errors.New("index not dropped") } @@ -118,11 +110,15 @@ func TestMigration62Down(t *testing.T) { for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + tc.mocks() migrations := GenerateMigrations()[61:62] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - assert.NoError(t, migrates.Down(migrate.AllAvailable)) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) 
+ assert.NoError(t, migrates.Down(context.Background(), migrate.AllAvailable)) assert.NoError(t, tc.expected()) }) diff --git a/api/store/mongo/migrations/migration_63.go b/api/store/mongo/migrations/migration_63.go index c89f5acaffd..2159e0fce1c 100644 --- a/api/store/mongo/migrations/migration_63.go +++ b/api/store/mongo/migrations/migration_63.go @@ -12,7 +12,7 @@ import ( var migration63 = migrate.Migration{ Version: 63, Description: "add MFA fields to collection users", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 63, @@ -27,13 +27,13 @@ var migration63 = migrate.Migration{ }, } - if _, err := db.Collection("users").UpdateMany(context.TODO(), bson.M{}, update); err != nil { + if _, err := db.Collection("users").UpdateMany(ctx, bson.M{}, update); err != nil { return err } return nil - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 63, @@ -48,10 +48,10 @@ var migration63 = migrate.Migration{ }, } - if _, err := db.Collection("users").UpdateMany(context.TODO(), bson.M{}, update); err != nil { + if _, err := db.Collection("users").UpdateMany(ctx, bson.M{}, update); err != nil { return err } return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_63_test.go b/api/store/mongo/migrations/migration_63_test.go index 50c88f6c4dc..9f60eedcbd6 100644 --- a/api/store/mongo/migrations/migration_63_test.go +++ b/api/store/mongo/migrations/migration_63_test.go @@ -4,19 +4,16 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func 
TestMigration63(t *testing.T) { - logrus.Info("Testing Migration 63 - Test whether MFA fields were added to the users collection") - - db := dbtest.DBServer{} - defer db.Stop() + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) user := models.User{ UserData: models.UserData{ @@ -24,32 +21,32 @@ func TestMigration63(t *testing.T) { }, } - _, err := db.Client().Database("test").Collection("users").InsertOne(context.TODO(), user) + _, err := c.Database("test").Collection("users").InsertOne(context.TODO(), user) assert.NoError(t, err) migrations := GenerateMigrations()[62:63] - migrates := migrate.NewMigrate(db.Client().Database("test"), migrations...) - err = migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), migrations...) + err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - version, _, err := migrates.Version() + version, _, err := migrates.Version(context.Background()) assert.NoError(t, err) assert.Equal(t, uint64(63), version) var migratedUser *models.User - err = db.Client().Database("test").Collection("users").FindOne(context.TODO(), bson.M{"name": user.Name}).Decode(&migratedUser) + err = c.Database("test").Collection("users").FindOne(context.TODO(), bson.M{"name": user.Name}).Decode(&migratedUser) assert.NoError(t, err) - assert.False(t, migratedUser.MFA) - assert.Equal(t, "", migratedUser.Secret) - assert.Empty(t, migratedUser.Codes) + assert.False(t, migratedUser.MFA.Enabled) + assert.Equal(t, "", migratedUser.MFA.Secret) + assert.Empty(t, migratedUser.MFA.RecoveryCodes) - err = migrates.Down(migrate.AllAvailable) + err = migrates.Down(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - err = db.Client().Database("test").Collection("users").FindOne(context.TODO(), bson.M{"name": user.Name}).Decode(&migratedUser) + err = c.Database("test").Collection("users").FindOne(context.TODO(), bson.M{"name": user.Name}).Decode(&migratedUser) assert.NoError(t, err) - 
assert.False(t, migratedUser.MFA) - assert.Equal(t, "", migratedUser.Secret) - assert.Empty(t, migratedUser.Codes) + assert.False(t, migratedUser.MFA.Enabled) + assert.Equal(t, "", migratedUser.MFA.Secret) + assert.Empty(t, migratedUser.MFA.RecoveryCodes) } diff --git a/api/store/mongo/migrations/migration_64.go b/api/store/mongo/migrations/migration_64.go new file mode 100644 index 00000000000..ae1d6c05214 --- /dev/null +++ b/api/store/mongo/migrations/migration_64.go @@ -0,0 +1,61 @@ +package migrations + +import ( + "context" + + "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration64 = migrate.Migration{ + Version: 64, + Description: "Adding the 'settings.connection_announcement' attribute to the namespace if it does not already exist.", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 64, + "action": "Up", + }).Info("Applying migration") + + filter := bson.M{ + "settings.connection_announcement": bson.M{"$in": []interface{}{nil, ""}}, + } + + update := bson.M{ + "$set": bson.M{ + "settings.connection_announcement": "", + }, + } + + _, err := db. + Collection("namespaces"). + UpdateMany(ctx, filter, update) + + return err + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 64, + "action": "Down", + }).Info("Reverting migration") + + filter := bson.M{ + "settings.connection_announcement": bson.M{"$in": []interface{}{nil, ""}}, + } + + update := bson.M{ + "$unset": bson.M{ + "settings.connection_announcement": "", + }, + } + + _, err := db. + Collection("namespaces"). 
+ UpdateMany(ctx, filter, update) + + return err + }), +} diff --git a/api/store/mongo/migrations/migration_64_test.go b/api/store/mongo/migrations/migration_64_test.go new file mode 100644 index 00000000000..74ceb470696 --- /dev/null +++ b/api/store/mongo/migrations/migration_64_test.go @@ -0,0 +1,78 @@ +package migrations + +import ( + "context" + "errors" + "testing" + + "github.com/shellhub-io/shellhub/pkg/envs" + envMocks "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/stretchr/testify/assert" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration64(t *testing.T) { + ctx := context.Background() + + mock := &envMocks.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + test func() error + }{ + { + description: "Success to apply up on migration 64", + setup: func() error { + _, err := c. + Database("test"). + Collection("namespaces"). + InsertOne(ctx, models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Settings: &models.NamespaceSettings{}, + }) + + return err + }, + test: func() error { + migrations := GenerateMigrations()[63:64] + migrates := migrate.NewMigrate(c.Database("test"), migrations...) + err := migrates.Up(context.Background(), migrate.AllAvailable) + if err != nil { + return err + } + + query := c. + Database("test"). + Collection("namespaces"). 
+ FindOne(context.TODO(), bson.M{"tenant_id": "00000000-0000-4000-0000-000000000000"}) + + ns := new(models.Namespace) + if err := query.Decode(ns); err != nil { + return errors.New("unable to find the namespace") + } + + if ns.Settings.ConnectionAnnouncement != "" { + return errors.New("unable to apply the migration") + } + + return nil + }, + }, + } + + for _, test := range cases { + tc := test + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + assert.NoError(t, tc.setup()) + assert.NoError(t, tc.test()) + }) + } +} diff --git a/api/store/mongo/migrations/migration_65.go b/api/store/mongo/migrations/migration_65.go new file mode 100644 index 00000000000..9a7f9371294 --- /dev/null +++ b/api/store/mongo/migrations/migration_65.go @@ -0,0 +1,61 @@ +package migrations + +import ( + "context" + + "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration65 = migrate.Migration{ + Version: 65, + Description: "Adding the 'recovery_email' attribute to the user if it does not already exist.", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 65, + "action": "Up", + }).Info("Applying migration") + + filter := bson.M{ + "recovery_email": bson.M{"$in": []interface{}{nil, ""}}, + } + + update := bson.M{ + "$set": bson.M{ + "recovery_email": "", + }, + } + + _, err := db. + Collection("users"). + UpdateMany(ctx, filter, update) + + return err + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 65, + "action": "Down", + }).Info("Reverting migration") + + filter := bson.M{ + "_id": bson.M{"$ne": nil}, + } + + update := bson.M{ + "$unset": bson.M{ + "recovery_email": "", + }, + } + + _, err := db. 
+ Collection("users"). + UpdateMany(ctx, filter, update) + + return err + }), +} diff --git a/api/store/mongo/migrations/migration_65_test.go b/api/store/mongo/migrations/migration_65_test.go new file mode 100644 index 00000000000..dc0fc726c13 --- /dev/null +++ b/api/store/mongo/migrations/migration_65_test.go @@ -0,0 +1,77 @@ +package migrations + +import ( + "context" + "errors" + "testing" + + "github.com/shellhub-io/shellhub/pkg/envs" + envMocks "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/stretchr/testify/assert" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration65(t *testing.T) { + ctx := context.Background() + + mock := &envMocks.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + test func() error + }{ + { + description: "Success to apply up on migration 65", + setup: func() error { + _, err := c. + Database("test"). + Collection("users"). + InsertOne(ctx, models.User{ + UserData: models.UserData{ + Username: "john_doe", + }, + }) + + return err + }, + test: func() error { + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[64]) + if err := migrates.Up(context.Background(), migrate.AllAvailable); err != nil { + return err + } + + query := c. + Database("test"). + Collection("users"). 
+ FindOne(context.TODO(), bson.M{"username": "john_doe"}) + + user := new(models.User) + if err := query.Decode(user); err != nil { + return errors.New("unable to find the user") + } + + if user.RecoveryEmail != "" { + return errors.New("unable to apply the migration") + } + + return nil + }, + }, + } + + for _, test := range cases { + tc := test + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + assert.NoError(t, tc.setup()) + assert.NoError(t, tc.test()) + }) + } +} diff --git a/api/store/mongo/migrations/migration_66.go b/api/store/mongo/migrations/migration_66.go new file mode 100644 index 00000000000..525362be39f --- /dev/null +++ b/api/store/mongo/migrations/migration_66.go @@ -0,0 +1,61 @@ +package migrations + +import ( + "context" + + log "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration66 = migrate.Migration{ + Version: 66, + Description: "Replace the user's MFA attributes.", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log. + WithFields(log.Fields{ + "component": "migration", + "version": 66, + "action": "Up", + }). + Info("Applying migration") + + filter := bson.M{ + "_id": bson.M{ + "$ne": nil, + }, + } + + rename := bson.M{ + "$rename": bson.M{ + "status_mfa": "mfa.enabled", + "secret": "mfa.secret", + "codes": "mfa.recovery_codes", + }, + } + + if _, err := db.Collection("users").UpdateMany(ctx, filter, rename); err != nil { + return err + } + + unset := bson.M{ + "$unset": bson.M{ + "status_mfa": "", + "secret": "", + "codes": "", + }, + } + + _, err := db.Collection("users").UpdateMany(ctx, filter, unset) + + return err + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { + log. + WithFields(log.Fields{ + "component": "migration", + "version": 66, + "action": "Down", + }). 
+ Info("Applying migration") + + log.Info("Unable to undo the MFA object") + + return nil + }), +} diff --git a/api/store/mongo/migrations/migration_66_test.go b/api/store/mongo/migrations/migration_66_test.go new file mode 100644 index 00000000000..3fbc6686f0f --- /dev/null +++ b/api/store/mongo/migrations/migration_66_test.go @@ -0,0 +1,84 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/shellhub-io/shellhub/pkg/envs" + envMocks "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +func TestMigration66Up(t *testing.T) { + ctx := context.Background() + + mock := &envMocks.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + test func() error + expected map[string]interface{} + }{ + { + description: "Success to apply up on migration 66", + setup: func() error { + _, err := c. + Database("test"). + Collection("users"). + InsertOne(ctx, map[string]interface{}{ + "username": "john_doe", + "status_mfa": true, + "secret": "secret", + "codes": []string{"code-1", "code-2"}, + }) + + return err + }, + expected: map[string]interface{}{ + "mfa": map[string]interface{}{ + "enabled": true, + "secret": "secret", + "recovery_codes": primitive.A{"code-1", "code-2"}, + }, + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + assert.NoError(t, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[65]) + require.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) + + query := c. + Database("test"). + Collection("users"). 
+ FindOne(context.TODO(), bson.M{"username": "john_doe"}) + + user := make(map[string]interface{}) + require.NoError(t, query.Decode(&user)) + + attr, ok := user["mfa"] + require.Equal(t, true, ok) + require.Equal(t, tc.expected["mfa"], attr) + + _, ok = user["status_mfa"] + require.Equal(t, false, ok) + _, ok = user["secret"] + require.Equal(t, false, ok) + _, ok = user["codes"] + require.Equal(t, false, ok) + }) + } +} diff --git a/api/store/mongo/migrations/migration_67.go b/api/store/mongo/migrations/migration_67.go new file mode 100644 index 00000000000..2127021c9fb --- /dev/null +++ b/api/store/mongo/migrations/migration_67.go @@ -0,0 +1,91 @@ +package migrations + +import ( + "context" + + "github.com/shellhub-io/shellhub/pkg/hash" + "github.com/shellhub-io/shellhub/pkg/models" + log "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration67 = migrate.Migration{ + Version: 67, + Description: "Hash the user's MFA recovery code before storing it as a plain string.", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log. + WithFields(log.Fields{ + "component": "migration", + "version": 67, + "action": "Up", + }). 
+ Info("Applying migration") + + pipeline := []bson.M{ + { + "$match": bson.M{ + "mfa.enabled": true, + "mfa.recovery_codes.0": bson.M{ + "$not": bson.M{ + "$regex": "^\\$", + }, + }, + }, + }, + } + + cursor, err := db.Collection("users").Aggregate(ctx, pipeline) + if err != nil { + return err + } + defer cursor.Close(ctx) + + updateModels := make([]mongo.WriteModel, 0) + + for cursor.Next(ctx) { + user := new(models.User) + if err := cursor.Decode(user); err != nil { + return err + } + + recoveryCodes := make([]string, 0) + for _, c := range user.MFA.RecoveryCodes { + hash, err := hash.Do(c) + if err != nil { + return err + } + + recoveryCodes = append(recoveryCodes, hash) + + } + + filter := bson.M{"username": user.Username} + update := bson.M{"$set": bson.M{"mfa.recovery_codes": recoveryCodes}} + + updateModels = append(updateModels, mongo.NewUpdateOneModel().SetFilter(filter).SetUpdate(update).SetUpsert(false)) + } + + if len(updateModels) > 0 { + if _, err := db.Collection("users").BulkWrite(ctx, updateModels); err != nil { + return err + } + } + + return nil + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { + log. + WithFields(log.Fields{ + "component": "migration", + "version": 67, + "action": "Down", + }). 
+ Info("Applying migration") + + log.Info("Unable to undo the recovery code hash") + + return nil + }), +} diff --git a/api/store/mongo/migrations/migration_67_test.go b/api/store/mongo/migrations/migration_67_test.go new file mode 100644 index 00000000000..fe728ca5e74 --- /dev/null +++ b/api/store/mongo/migrations/migration_67_test.go @@ -0,0 +1,82 @@ +package migrations + +import ( + "context" + "strings" + "testing" + + "github.com/labstack/gommon/log" + "github.com/shellhub-io/shellhub/pkg/envs" + envMocks "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/shellhub-io/shellhub/pkg/hash" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration67Up(t *testing.T) { + ctx := context.Background() + + mock := &envMocks.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + recoveryCodes []string + test func() error + }{ + { + description: "Success to apply up on migration 67", + recoveryCodes: []string{"secret-1", "secret-2"}, + setup: func() error { + _, err := c. + Database("test"). + Collection("users"). + InsertOne(ctx, models.User{ + UserData: models.UserData{ + Username: "john_doe", + }, + MFA: models.UserMFA{ + Enabled: true, + RecoveryCodes: []string{"secret-1", "secret-2"}, + }, + }) + + return err + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + assert.NoError(t, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[66]) + require.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) + + query := c. + Database("test"). + Collection("users"). 
+ FindOne(context.TODO(), bson.M{"username": "john_doe"}) + + user := new(models.User) + require.NoError(t, query.Decode(user)) + + log.Infof("user: %+v", user) + + require.Equal(t, len(tc.recoveryCodes), len(user.MFA.RecoveryCodes)) + for i, c := range tc.recoveryCodes { + require.NotEqual(t, c, user.MFA.RecoveryCodes[i]) + require.Equal(t, true, strings.HasPrefix(user.MFA.RecoveryCodes[i], "$")) + require.Equal(t, true, hash.CompareWith(c, user.MFA.RecoveryCodes[i])) + } + }) + } +} diff --git a/api/store/mongo/migrations/migration_68.go b/api/store/mongo/migrations/migration_68.go new file mode 100644 index 00000000000..d06123d32ad --- /dev/null +++ b/api/store/mongo/migrations/migration_68.go @@ -0,0 +1,81 @@ +package migrations + +import ( + "context" + + log "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration68 = migrate.Migration{ + Version: 68, + Description: "Rename `api_keys.user_id` to `api_keys.created_by`.", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log. + WithFields(log.Fields{ + "component": "migration", + "version": 68, + "action": "Up", + }). + Info("Applying migration") + + filter := bson.M{ + "user_id": bson.M{"$nin": []interface{}{nil, ""}}, + } + + rename := bson.M{ + "$rename": bson.M{ + "user_id": "created_by", + }, + } + + if _, err := db.Collection("api_keys").UpdateMany(ctx, filter, rename); err != nil { + return err + } + + unset := bson.M{ + "$unset": bson.M{ + "user_id": "", + }, + } + + _, err := db.Collection("api_keys").UpdateMany(ctx, filter, unset) + + return err + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log. + WithFields(log.Fields{ + "component": "migration", + "version": 68, + "action": "Down", + }). 
+ Info("Applying migration") + + filter := bson.M{ + "created_by": bson.M{"$nin": []interface{}{nil, ""}}, + } + + rename := bson.M{ + "$rename": bson.M{ + "created_by": "user_id", + }, + } + + if _, err := db.Collection("api_keys").UpdateMany(ctx, filter, rename); err != nil { + return err + } + + unset := bson.M{ + "$unset": bson.M{ + "created_by": "", + }, + } + + _, err := db.Collection("api_keys").UpdateMany(ctx, filter, unset) + + return err + }), +} diff --git a/api/store/mongo/migrations/migration_68_test.go b/api/store/mongo/migrations/migration_68_test.go new file mode 100644 index 00000000000..8f9e17b54e8 --- /dev/null +++ b/api/store/mongo/migrations/migration_68_test.go @@ -0,0 +1,136 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/shellhub-io/shellhub/pkg/envs" + envMocks "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration68Up(t *testing.T) { + ctx := context.Background() + + mock := &envMocks.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + test func() error + expected map[string]interface{} + }{ + { + description: "Success to apply up on migration 68", + setup: func() error { + _, err := c. + Database("test"). + Collection("api_keys"). 
+ InsertOne(ctx, map[string]interface{}{ + "name": "dev", + "user_id": "000000000000000000000000", + }) + + return err + }, + expected: map[string]interface{}{ + "name": "dev", + "created_by": "000000000000000000000000", + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + assert.NoError(t, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[67]) + require.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) + + query := c. + Database("test"). + Collection("api_keys"). + FindOne(context.TODO(), bson.M{"name": "dev"}) + + apiKey := make(map[string]interface{}) + require.NoError(t, query.Decode(&apiKey)) + + _, ok := apiKey["user_id"] + require.Equal(t, false, ok) + + attr, ok := apiKey["created_by"] + require.Equal(t, true, ok) + require.Equal(t, tc.expected["created_by"], attr) + }) + } +} + +func TestMigration68Down(t *testing.T) { + ctx := context.Background() + + mock := &envMocks.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + test func() error + expected map[string]interface{} + }{ + { + description: "Success to apply down on migration 68", + setup: func() error { + _, err := c. + Database("test"). + Collection("api_keys"). 
+ InsertOne(ctx, map[string]interface{}{ + "name": "dev", + "created_by": "000000000000000000000000", + }) + + return err + }, + expected: map[string]interface{}{ + "name": "dev", + "user_id": "000000000000000000000000", + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + assert.NoError(t, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[67]) + require.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) + require.NoError(t, migrates.Down(context.Background(), migrate.AllAvailable)) + + query := c. + Database("test"). + Collection("api_keys"). + FindOne(context.TODO(), bson.M{"name": "dev"}) + + apiKey := make(map[string]interface{}) + require.NoError(t, query.Decode(&apiKey)) + + _, ok := apiKey["created_by"] + require.Equal(t, false, ok) + + attr, ok := apiKey["user_id"] + require.Equal(t, true, ok) + require.Equal(t, tc.expected["user_id"], attr) + }) + } +} diff --git a/api/store/mongo/migrations/migration_69.go b/api/store/mongo/migrations/migration_69.go new file mode 100644 index 00000000000..9d2c069a5f5 --- /dev/null +++ b/api/store/mongo/migrations/migration_69.go @@ -0,0 +1,106 @@ +package migrations + +import ( + "context" + "crypto/sha256" + "encoding/hex" + + "github.com/shellhub-io/shellhub/pkg/models" + log "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration69 = migrate.Migration{ + Version: 69, + Description: "Hash API key ID. It will delete the old document and create a new one with the hashed ID.", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log. + WithFields(log.Fields{ + "component": "migration", + "version": 69, + "action": "Up", + }). 
+ Info("Applying migration") + + pipeline := []bson.M{ + { + "$match": bson.M{ + "_id": bson.M{ + "$regex": "^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-4[a-fA-F0-9]{3}-[8|9|aA|bB][a-fA-F0-9]{3}-[a-fA-F0-9]{12}$", + }, + }, + }, + } + + cursor, err := db.Collection("api_keys").Aggregate(ctx, pipeline) + if err != nil { + return err + } + defer cursor.Close(ctx) + + updateModels := make([]mongo.WriteModel, 0) + deleteModels := make([]mongo.WriteModel, 0) + + for cursor.Next(ctx) { + apiKey := new(models.APIKey) + if err := cursor.Decode(apiKey); err != nil { + return err + } + + idSum := sha256.Sum256([]byte(apiKey.ID)) + hashedID := hex.EncodeToString(idSum[:]) + + doc := &models.APIKey{ + ID: hashedID, + Name: apiKey.Name, + CreatedBy: apiKey.CreatedBy, + TenantID: apiKey.TenantID, + Role: apiKey.Role, + CreatedAt: apiKey.CreatedAt, + UpdatedAt: apiKey.UpdatedAt, + ExpiresIn: apiKey.ExpiresIn, + } + + deleteModels = append(deleteModels, mongo.NewDeleteOneModel().SetFilter(bson.M{"_id": apiKey.ID})) + updateModels = append(updateModels, mongo.NewInsertOneModel().SetDocument(doc)) + } + + if len(updateModels) > 0 || len(deleteModels) > 0 { + mongoSession, err := db.Client().StartSession() + if err != nil { + return err + } + defer mongoSession.EndSession(ctx) + + _, err = mongoSession.WithTransaction(ctx, func(_ mongo.SessionContext) (interface{}, error) { + if _, err := db.Collection("api_keys").BulkWrite(ctx, updateModels); err != nil { + return nil, err + } + + _, err := db.Collection("api_keys").BulkWrite(ctx, deleteModels) + + return nil, err + }) + if err != nil { + return err + } + } + + return nil + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { + log. + WithFields(log.Fields{ + "component": "migration", + "version": 69, + "action": "Down", + }). 
+ Info("Applying migration") + + log.Info("Unable to undo the api key hash") + + return nil + }), +} diff --git a/api/store/mongo/migrations/migration_69_test.go b/api/store/mongo/migrations/migration_69_test.go new file mode 100644 index 00000000000..8f6f0a8b760 --- /dev/null +++ b/api/store/mongo/migrations/migration_69_test.go @@ -0,0 +1,76 @@ +package migrations + +import ( + "context" + "crypto/sha256" + "encoding/hex" + "testing" + + "github.com/shellhub-io/shellhub/pkg/envs" + envMocks "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + mongodb "go.mongodb.org/mongo-driver/mongo" +) + +func TestMigration69Up(t *testing.T) { + ctx := context.Background() + + mock := &envMocks.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + plainID string + test func() error + }{ + { + description: "Success to apply up on migration 69", + plainID: "343d67d3-5084-4845-ab10-59891c88ec76", + setup: func() error { + _, err := c. + Database("test"). + Collection("api_keys"). + InsertOne(ctx, models.APIKey{ID: "343d67d3-5084-4845-ab10-59891c88ec76"}) + + return err + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + assert.NoError(t, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[68]) + require.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) + + idSum := sha256.Sum256([]byte(tc.plainID)) + hashedID := hex.EncodeToString(idSum[:]) + + old := c. + Database("test"). + Collection("api_keys"). + FindOne(context.TODO(), bson.M{"_id": tc.plainID}). + Decode(&models.APIKey{}) + require.Equal(t, mongodb.ErrNoDocuments, old) + + query := c. + Database("test"). 
+ Collection("api_keys"). + FindOne(context.TODO(), bson.M{"_id": hashedID}) + + apiKey := new(models.APIKey) + require.NoError(t, query.Decode(apiKey)) + require.Equal(t, hashedID, apiKey.ID) + }) + } +} diff --git a/api/store/mongo/migrations/migration_6_test.go b/api/store/mongo/migrations/migration_6_test.go index 42a5e10b499..31bbf332987 100644 --- a/api/store/mongo/migrations/migration_6_test.go +++ b/api/store/mongo/migrations/migration_6_test.go @@ -4,21 +4,18 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" ) func TestMigration6(t *testing.T) { - logrus.Info("Testing Migration 6 - Test if the status is not unique") + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) - db := dbtest.DBServer{} - defer db.Stop() - - migrates := migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:5]...) - err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:5]...) + err := migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) device1 := models.Device{ @@ -29,13 +26,13 @@ func TestMigration6(t *testing.T) { Status: "accepted", } - _, err = db.Client().Database("test").Collection("devices").InsertOne(context.TODO(), device1) + _, err = c.Database("test").Collection("devices").InsertOne(context.TODO(), device1) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("devices").InsertOne(context.TODO(), device2) + _, err = c.Database("test").Collection("devices").InsertOne(context.TODO(), device2) assert.NoError(t, err) - migrates = migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:6]...) - err = migrates.Up(migrate.AllAvailable) + migrates = migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:6]...) 
+ err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) } diff --git a/api/store/mongo/migrations/migration_7.go b/api/store/mongo/migrations/migration_7.go index a5cee225b81..11a0d44dc2e 100644 --- a/api/store/mongo/migrations/migration_7.go +++ b/api/store/mongo/migrations/migration_7.go @@ -13,7 +13,7 @@ import ( var migration7 = migrate.Migration{ Version: 7, Description: "Unset unique on uid and message in the recoded_sessions collection", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 7, @@ -23,7 +23,7 @@ var migration7 = migrate.Migration{ Keys: bson.D{{"uid", 1}}, Options: options.Index().SetName("uid").SetUnique(false), } - if _, err := db.Collection("recorded_sessions").Indexes().CreateOne(context.TODO(), mod); err != nil { + if _, err := db.Collection("recorded_sessions").Indexes().CreateOne(ctx, mod); err != nil { return err } @@ -31,27 +31,27 @@ var migration7 = migrate.Migration{ Keys: bson.D{{"message", 1}}, Options: options.Index().SetName("message").SetUnique(false), } - _, err := db.Collection("recorded_sessions").Indexes().CreateOne(context.TODO(), mod) + _, err := db.Collection("recorded_sessions").Indexes().CreateOne(ctx, mod) return err - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 7, "action": "Down", }).Info("Applying migration") - if _, err := db.Collection("recorded_sessions").UpdateMany(context.TODO(), bson.M{}, bson.M{"$unset": bson.M{"uid": ""}}); err != nil { + if _, err := db.Collection("recorded_sessions").UpdateMany(ctx, bson.M{}, bson.M{"$unset": bson.M{"uid": ""}}); err != nil { return err } - if _, err := db.Collection("recorded_sessions").UpdateMany(context.TODO(), bson.M{}, bson.M{"$unset": 
bson.M{"message": ""}}); err != nil { + if _, err := db.Collection("recorded_sessions").UpdateMany(ctx, bson.M{}, bson.M{"$unset": bson.M{"message": ""}}); err != nil { return err } - if _, err := db.Collection("recorded_sessions").Indexes().DropOne(context.TODO(), "uid"); err != nil { + if _, err := db.Collection("recorded_sessions").Indexes().DropOne(ctx, "uid"); err != nil { return err } - _, err := db.Collection("recorded_sessions").Indexes().DropOne(context.TODO(), "message") + _, err := db.Collection("recorded_sessions").Indexes().DropOne(ctx, "message") return err - }, + }), } diff --git a/api/store/mongo/migrations/migration_70.go b/api/store/mongo/migrations/migration_70.go new file mode 100644 index 00000000000..ee453654522 --- /dev/null +++ b/api/store/mongo/migrations/migration_70.go @@ -0,0 +1,61 @@ +package migrations + +import ( + "context" + + "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration70 = migrate.Migration{ + Version: 70, + Description: "Adding the 'preferences' attribute to the user if it does not already exist.", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 70, + "action": "Up", + }).Info("Applying migration") + + filter := bson.M{ + "preferences": bson.M{"$exists": false}, + } + + update := bson.M{ + "$set": bson.M{ + "preferences": bson.M{}, + }, + } + + _, err := db. + Collection("users"). 
+ UpdateMany(ctx, filter, update) + + return err + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 70, + "action": "Down", + }).Info("Reverting migration") + + filter := bson.M{ + "preferences": bson.M{"$exists": true}, + } + + update := bson.M{ + "$unset": bson.M{ + "preferences": "", + }, + } + + _, err := db. + Collection("users"). + UpdateMany(ctx, filter, update) + + return err + }), +} diff --git a/api/store/mongo/migrations/migration_70_test.go b/api/store/mongo/migrations/migration_70_test.go new file mode 100644 index 00000000000..7b14993a9fb --- /dev/null +++ b/api/store/mongo/migrations/migration_70_test.go @@ -0,0 +1,117 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/shellhub-io/shellhub/pkg/envs" + envmock "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration70Up(t *testing.T) { + ctx := context.Background() + + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + test func() error + }{ + { + description: "Success to apply up on migration 70", + setup: func() error { + _, err := c. + Database("test"). + Collection("users"). + InsertOne(ctx, map[string]interface{}{ + "name": "john doe", + }) + + return err + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + assert.NoError(t, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[69]) + require.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) + + query := c. + Database("test"). + Collection("users"). 
+ FindOne(context.TODO(), bson.M{"name": "john doe"}) + + user := make(map[string]interface{}) + require.NoError(t, query.Decode(&user)) + + _, ok := user["preferences"] + require.Equal(t, true, ok) + }) + } +} + +func TestMigration70Down(t *testing.T) { + ctx := context.Background() + + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + test func() error + }{ + { + description: "Success to apply up on migration 70", + setup: func() error { + _, err := c. + Database("test"). + Collection("users"). + InsertOne(ctx, map[string]interface{}{ + "name": "john doe", + "preferences": map[string]interface{}{}, + }) + + return err + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + assert.NoError(t, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[69]) + require.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) + require.NoError(t, migrates.Down(context.Background(), migrate.AllAvailable)) + + query := c. + Database("test"). + Collection("users"). 
+ FindOne(context.TODO(), bson.M{"name": "john doe"}) + + user := make(map[string]interface{}) + require.NoError(t, query.Decode(&user)) + + _, ok := user["preferences"] + require.Equal(t, false, ok) + }) + } +} diff --git a/api/store/mongo/migrations/migration_71.go b/api/store/mongo/migrations/migration_71.go new file mode 100644 index 00000000000..aa83563bd77 --- /dev/null +++ b/api/store/mongo/migrations/migration_71.go @@ -0,0 +1,63 @@ +package migrations + +import ( + "context" + + "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration71 = migrate.Migration{ + Version: 71, + Description: "Adding the 'preferences.preferred_namespace' attribute to the user if it does not already exist.", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 71, + "action": "Up", + }).Info("Applying migration") + + filter := bson.M{ + "preferences": bson.M{"$exists": true}, + "preferences.preferred_namespace": bson.M{"$exists": false}, + } + + update := bson.M{ + "$set": bson.M{ + "preferences.preferred_namespace": "", + }, + } + + _, err := db. + Collection("users"). + UpdateMany(ctx, filter, update) + + return err + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 71, + "action": "Down", + }).Info("Reverting migration") + + filter := bson.M{ + "preferences": bson.M{"$exists": true}, + "preferences.preferred_namespace": bson.M{"$exists": true}, + } + + update := bson.M{ + "$unset": bson.M{ + "preferences.preferred_namespace": "", + }, + } + + _, err := db. + Collection("users"). 
+ UpdateMany(ctx, filter, update) + + return err + }), +} diff --git a/api/store/mongo/migrations/migration_71_test.go b/api/store/mongo/migrations/migration_71_test.go new file mode 100644 index 00000000000..6696ad1f980 --- /dev/null +++ b/api/store/mongo/migrations/migration_71_test.go @@ -0,0 +1,122 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/shellhub-io/shellhub/pkg/envs" + envmock "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration71Up(t *testing.T) { + ctx := context.Background() + + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + test func() error + }{ + { + description: "Success to apply up on migration 71", + setup: func() error { + _, err := c. + Database("test"). + Collection("users"). + InsertOne(ctx, map[string]interface{}{ + "name": "john doe", + "preferences": map[string]interface{}{}, + }) + + return err + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + assert.NoError(t, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[70]) + require.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) + + query := c. + Database("test"). + Collection("users"). 
+ FindOne(context.TODO(), bson.M{"name": "john doe"}) + + user := make(map[string]interface{}) + require.NoError(t, query.Decode(&user)) + + preferences := user["preferences"] + _, ok := preferences.(map[string]interface{})["preferred_namespace"] + require.Equal(t, true, ok) + }) + } +} + +func TestMigration71Down(t *testing.T) { + ctx := context.Background() + + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + test func() error + }{ + { + description: "Success to apply up on migration 71", + setup: func() error { + _, err := c. + Database("test"). + Collection("users"). + InsertOne(ctx, map[string]interface{}{ + "name": "john doe", + "preferences": map[string]interface{}{ + "preferred_namespace": "00000000-0000-4000-0000-000000000000", + }, + }) + + return err + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + assert.NoError(t, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[70]) + require.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) + require.NoError(t, migrates.Down(context.Background(), migrate.AllAvailable)) + + query := c. + Database("test"). + Collection("users"). 
+ FindOne(context.TODO(), bson.M{"name": "john doe"}) + + user := make(map[string]interface{}) + require.NoError(t, query.Decode(&user)) + + preferences := user["preferences"] + _, ok := preferences.(map[string]interface{})["preferred_namespace"] + require.Equal(t, false, ok) + }) + } +} diff --git a/api/store/mongo/migrations/migration_72.go b/api/store/mongo/migrations/migration_72.go new file mode 100644 index 00000000000..d888c752c07 --- /dev/null +++ b/api/store/mongo/migrations/migration_72.go @@ -0,0 +1,135 @@ +package migrations + +import ( + "context" + "time" + + "github.com/shellhub-io/shellhub/pkg/api/authorizer" + "github.com/shellhub-io/shellhub/pkg/models" + log "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +// Member struct as it was when migration 72 was created (with Status field) +type memberForMigration72 struct { + ID string `json:"id,omitempty" bson:"id,omitempty"` + AddedAt time.Time `json:"added_at" bson:"added_at"` + Email string `json:"email" bson:"email,omitempty" validate:"email"` + Role authorizer.Role `json:"role" bson:"role" validate:"required,oneof=administrator operator observer"` + Status string `json:"status" bson:"status"` +} + +// Namespace struct for migration 72 with the old Member type +type namespaceForMigration72 struct { + models.Namespace `bson:",inline"` + Members []memberForMigration72 `json:"members" bson:"members"` +} + +var migration72 = migrate.Migration{ + Version: 72, + Description: "Adding the 'members.$.status' attribute to the namespace if it does not already exist.", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{ + "component": "migration", + "version": 72, + "action": "Up", + }).Info("Applying migration") + + pipeline := []bson.M{ + { + "$match": bson.M{ + "tenant_id": bson.M{ + "$exists": true, + }, + }, + }, + } + + cursor, err := 
db.Collection("namespaces").Aggregate(ctx, pipeline) + if err != nil { + return err + } + defer cursor.Close(ctx) + + updateModels := make([]mongo.WriteModel, 0) + + for cursor.Next(ctx) { + namespace := new(namespaceForMigration72) + if err := cursor.Decode(namespace); err != nil { + return err + } + + for _, m := range namespace.Members { + if m.Status == "" { + updateModel := mongo. + NewUpdateOneModel(). + SetFilter(bson.M{"tenant_id": namespace.TenantID, "members": bson.M{"$elemMatch": bson.M{"id": m.ID}}}). + SetUpdate(bson.M{"$set": bson.M{"members.$.status": "accepted"}}) + + updateModels = append(updateModels, updateModel) + } + } + } + + if len(updateModels) == 0 { + return nil + } + + _, err = db.Collection("namespaces").BulkWrite(ctx, updateModels) + + return err + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{ + "component": "migration", + "version": 72, + "action": "Down", + }).Info("Reverting migration") + + pipeline := []bson.M{ + { + "$match": bson.M{ + "tenant_id": bson.M{ + "$exists": true, + }, + }, + }, + } + + cursor, err := db.Collection("namespaces").Aggregate(ctx, pipeline) + if err != nil { + return err + } + defer cursor.Close(ctx) + + updateModels := make([]mongo.WriteModel, 0) + + for cursor.Next(ctx) { + namespace := new(namespaceForMigration72) + if err := cursor.Decode(namespace); err != nil { + return err + } + + for _, m := range namespace.Members { + if m.Status != "" { + updateModel := mongo. + NewUpdateOneModel(). + SetFilter(bson.M{"tenant_id": namespace.TenantID, "members": bson.M{"$elemMatch": bson.M{"id": m.ID}}}). 
+ SetUpdate(bson.M{"$unset": bson.M{"members.$.status": ""}}) + + updateModels = append(updateModels, updateModel) + } + } + } + + if len(updateModels) == 0 { + return nil + } + + _, err = db.Collection("namespaces").BulkWrite(ctx, updateModels) + + return err + }), +} diff --git a/api/store/mongo/migrations/migration_72_test.go b/api/store/mongo/migrations/migration_72_test.go new file mode 100644 index 00000000000..dd7c32648f4 --- /dev/null +++ b/api/store/mongo/migrations/migration_72_test.go @@ -0,0 +1,135 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/shellhub-io/shellhub/pkg/envs" + envmock "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +func TestMigration72Up(t *testing.T) { + ctx := context.Background() + + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + test func() error + }{ + { + description: "Success to apply up on migration 72", + setup: func() error { + _, err := c. + Database("test"). + Collection("namespaces"). + InsertOne(ctx, map[string]interface{}{ + "tenant_id": "00000000-0000-4000-0000-000000000000", + "members": []map[string]interface{}{ + { + "id": "000000000000000000000000", + }, + }, + }) + + return err + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + assert.NoError(t, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[71]) + require.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) + + query := c. + Database("test"). + Collection("namespaces"). 
+ FindOne(context.TODO(), bson.M{"tenant_id": "00000000-0000-4000-0000-000000000000"}) + + namespace := make(map[string]interface{}) + require.NoError(t, query.Decode(&namespace)) + + members := namespace["members"].(primitive.A) + for _, m := range members { + val, ok := m.(map[string]interface{})["status"] + require.Equal(t, true, ok) + require.Equal(t, "accepted", val) + } + }) + } +} + +func TestMigration72Down(t *testing.T) { + ctx := context.Background() + + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + test func() error + }{ + { + description: "Success to apply up on migration 72", + setup: func() error { + _, err := c. + Database("test"). + Collection("namespaces"). + InsertOne(ctx, map[string]interface{}{ + "tenant_id": "00000000-0000-4000-0000-000000000000", + "members": []map[string]interface{}{ + { + "id": "000000000000000000000000", + "status": "accepted", + }, + }, + }) + + return err + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + assert.NoError(t, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[71]) + require.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) + require.NoError(t, migrates.Down(context.Background(), migrate.AllAvailable)) + + query := c. + Database("test"). + Collection("namespaces"). 
+ FindOne(context.TODO(), bson.M{"tenant_id": "00000000-0000-4000-0000-000000000000"}) + + namespace := make(map[string]interface{}) + require.NoError(t, query.Decode(&namespace)) + + members := namespace["members"].(primitive.A) + for _, m := range members { + _, ok := m.(map[string]interface{})["status"] + require.Equal(t, false, ok) + } + }) + } +} diff --git a/api/store/mongo/migrations/migration_73.go b/api/store/mongo/migrations/migration_73.go new file mode 100644 index 00000000000..3a6f8aac75f --- /dev/null +++ b/api/store/mongo/migrations/migration_73.go @@ -0,0 +1,120 @@ +package migrations + +import ( + "context" + "time" + + "github.com/shellhub-io/shellhub/pkg/models" + log "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration73 = migrate.Migration{ + Version: 73, + Description: "Adding the 'members.$.added_at' attribute to the namespace if it does not already exist. The value is the Go time.Time zeroer", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{ + "component": "migration", + "version": 73, + "action": "Up", + }).Info("Applying migration") + + pipeline := []bson.M{ + { + "$match": bson.M{ + "tenant_id": bson.M{ + "$exists": true, + }, + }, + }, + } + + cursor, err := db.Collection("namespaces").Aggregate(ctx, pipeline) + if err != nil { + return err + } + defer cursor.Close(ctx) + + updateModels := make([]mongo.WriteModel, 0) + + for cursor.Next(ctx) { + namespace := new(models.Namespace) + if err := cursor.Decode(namespace); err != nil { + return err + } + + for _, m := range namespace.Members { + if m.AddedAt.Equal((time.Time{})) { + updateModel := mongo. + NewUpdateOneModel(). + SetFilter(bson.M{"tenant_id": namespace.TenantID, "members": bson.M{"$elemMatch": bson.M{"id": m.ID}}}). 
+ // We update the added_at field to the same value as in the if statement + // because when the attribute is null in MongoDB, it will be converted + // to the zero value of time.Time. + SetUpdate(bson.M{"$set": bson.M{"members.$.added_at": time.Time{}}}) + + updateModels = append(updateModels, updateModel) + } + } + } + + if len(updateModels) == 0 { + return nil + } + + _, err = db.Collection("namespaces").BulkWrite(ctx, updateModels) + + return err + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{ + "component": "migration", + "version": 73, + "action": "Down", + }).Info("Reverting migration") + + pipeline := []bson.M{ + { + "$match": bson.M{ + "tenant_id": bson.M{ + "$exists": true, + }, + }, + }, + } + + cursor, err := db.Collection("namespaces").Aggregate(ctx, pipeline) + if err != nil { + return err + } + defer cursor.Close(ctx) + + updateModels := make([]mongo.WriteModel, 0) + + for cursor.Next(ctx) { + namespace := new(models.Namespace) + if err := cursor.Decode(namespace); err != nil { + return err + } + + for _, m := range namespace.Members { + updateModel := mongo. + NewUpdateOneModel(). + SetFilter(bson.M{"tenant_id": namespace.TenantID, "members": bson.M{"$elemMatch": bson.M{"id": m.ID}}}). 
+ SetUpdate(bson.M{"$unset": bson.M{"members.$.added_at": ""}}) + + updateModels = append(updateModels, updateModel) + } + } + + if len(updateModels) == 0 { + return nil + } + + _, err = db.Collection("namespaces").BulkWrite(ctx, updateModels) + + return err + }), +} diff --git a/api/store/mongo/migrations/migration_73_test.go b/api/store/mongo/migrations/migration_73_test.go new file mode 100644 index 00000000000..d1ad8aee8b8 --- /dev/null +++ b/api/store/mongo/migrations/migration_73_test.go @@ -0,0 +1,135 @@ +package migrations + +import ( + "context" + "testing" + "time" + + "github.com/shellhub-io/shellhub/pkg/envs" + envmock "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +func TestMigration73Up(t *testing.T) { + ctx := context.Background() + + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + test func() error + }{ + { + description: "Success to apply up on migration 73", + setup: func() error { + _, err := c. + Database("test"). + Collection("namespaces"). + InsertOne(ctx, map[string]interface{}{ + "tenant_id": "00000000-0000-4000-0000-000000000000", + "members": []map[string]interface{}{ + { + "id": "000000000000000000000000", + }, + }, + }) + + return err + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + assert.NoError(t, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[72]) + require.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) + + query := c. + Database("test"). + Collection("namespaces"). 
+ FindOne(context.TODO(), bson.M{"tenant_id": "00000000-0000-4000-0000-000000000000"}) + + namespace := make(map[string]interface{}) + require.NoError(t, query.Decode(&namespace)) + + members := namespace["members"].(primitive.A) + for _, m := range members { + _, ok := m.(map[string]interface{})["added_at"] + require.Equal(t, true, ok) + } + }) + } +} + +func TestMigration73Down(t *testing.T) { + ctx := context.Background() + + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + test func() error + }{ + { + description: "Success to apply up on migration 73", + setup: func() error { + _, err := c. + Database("test"). + Collection("namespaces"). + InsertOne(ctx, map[string]interface{}{ + "tenant_id": "00000000-0000-4000-0000-000000000000", + "members": []map[string]interface{}{ + { + "id": "000000000000000000000000", + "added_at": time.Now(), + }, + }, + }) + + return err + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + assert.NoError(t, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[72]) + require.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) + require.NoError(t, migrates.Down(context.Background(), migrate.AllAvailable)) + + query := c. + Database("test"). + Collection("namespaces"). 
+ FindOne(context.TODO(), bson.M{"tenant_id": "00000000-0000-4000-0000-000000000000"}) + + namespace := make(map[string]interface{}) + require.NoError(t, query.Decode(&namespace)) + + members := namespace["members"].(primitive.A) + for _, m := range members { + _, ok := m.(map[string]interface{})["added_at"] + require.Equal(t, false, ok) + } + }) + } +} diff --git a/api/store/mongo/migrations/migration_74.go b/api/store/mongo/migrations/migration_74.go new file mode 100644 index 00000000000..b195729cee8 --- /dev/null +++ b/api/store/mongo/migrations/migration_74.go @@ -0,0 +1,68 @@ +package migrations + +import ( + "context" + + "github.com/shellhub-io/shellhub/pkg/envs" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration74 = migrate.Migration{ + Version: 74, + Description: "Adding default message on announcement if it is not set.", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 74, + "action": "Up", + }).Info("Applying migration") + + filter := bson.M{ + "settings.connection_announcement": "", + } + + announcementMsg := "" + if envs.IsCommunity() { + announcementMsg = models.DefaultAnnouncementMessage + } + + update := bson.M{ + "$set": bson.M{ + "settings.connection_announcement": announcementMsg, + }, + } + + _, err := db. + Collection("namespaces"). 
+ UpdateMany(ctx, filter, update) + + return err + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 74, + "action": "Down", + }).Info("Reverting migration") + + filter := bson.M{ + "settings.connection_announcement": models.DefaultAnnouncementMessage, + } + + update := bson.M{ + "$set": bson.M{ + "settings.connection_announcement": "", + }, + } + + _, err := db. + Collection("namespaces"). + UpdateMany(ctx, filter, update) + + return err + }), +} diff --git a/api/store/mongo/migrations/migration_74_test.go b/api/store/mongo/migrations/migration_74_test.go new file mode 100644 index 00000000000..34a6be6cfa6 --- /dev/null +++ b/api/store/mongo/migrations/migration_74_test.go @@ -0,0 +1,167 @@ +package migrations + +import ( + "context" + "errors" + "testing" + + "github.com/shellhub-io/shellhub/pkg/envs" + env_mocks "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/stretchr/testify/assert" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +var envMock *env_mocks.Backend + +func TestMigration74(t *testing.T) { + ctx := context.Background() + + envMock = &env_mocks.Backend{} + envs.DefaultBackend = envMock + + cases := []struct { + description string + setup func() error + requireMocks func() + test func() error + }{ + { + description: "Success to apply up on migration 74, without message on cloud", + setup: func() error { + _, err := c. + Database("test"). + Collection("namespaces"). + InsertOne(ctx, models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Settings: &models.NamespaceSettings{}, + }) + + return err + }, + requireMocks: func() { + envMock.On("Get", "SHELLHUB_CLOUD").Return("true").Once() + }, + test: func() error { + migrations := GenerateMigrations()[73:74] + migrates := migrate.NewMigrate(c.Database("test"), migrations...) 
+ err := migrates.Up(context.Background(), migrate.AllAvailable) + if err != nil { + return err + } + + query := c. + Database("test"). + Collection("namespaces"). + FindOne(context.TODO(), bson.M{"tenant_id": "00000000-0000-4000-0000-000000000000"}) + + ns := new(models.Namespace) + if err := query.Decode(ns); err != nil { + return errors.New("unable to find the namespace") + } + + if ns.Settings.ConnectionAnnouncement != "" { + return errors.New("unable to apply the migration") + } + + return nil + }, + }, { + description: "Success to apply up on migration 74, with message on community", + setup: func() error { + _, err := c. + Database("test"). + Collection("namespaces"). + InsertOne(ctx, models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Settings: &models.NamespaceSettings{}, + }) + + return err + }, + requireMocks: func() { + envMock.On("Get", "SHELLHUB_CLOUD").Return("false").Once() + envMock.On("Get", "SHELLHUB_ENTERPRISE").Return("false").Once() + }, + test: func() error { + migrations := GenerateMigrations()[73:74] + migrates := migrate.NewMigrate(c.Database("test"), migrations...) + err := migrates.Up(context.Background(), migrate.AllAvailable) + if err != nil { + return err + } + + query := c. + Database("test"). + Collection("namespaces"). + FindOne(context.TODO(), bson.M{"tenant_id": "00000000-0000-4000-0000-000000000000"}) + + ns := new(models.Namespace) + if err := query.Decode(ns); err != nil { + return errors.New("unable to find the namespace") + } + + if ns.Settings.ConnectionAnnouncement != models.DefaultAnnouncementMessage { + return errors.New("unable to apply the migration") + } + + return nil + }, + }, { + description: "Success to unapply the migration 74", + setup: func() error { + _, err := c. + Database("test"). + Collection("namespaces"). 
+ InsertOne(ctx, models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Settings: &models.NamespaceSettings{}, + }) + + return err + }, + requireMocks: func() {}, + test: func() error { + migrations := GenerateMigrations()[73:74] + migrates := migrate.NewMigrate(c.Database("test"), migrations...) + err := migrates.Down(context.Background(), migrate.AllAvailable) + if err != nil { + return err + } + + query := c. + Database("test"). + Collection("namespaces"). + FindOne(context.TODO(), bson.M{"tenant_id": "00000000-0000-4000-0000-000000000000"}) + + ns := new(models.Namespace) + if err := query.Decode(ns); err != nil { + return errors.New("unable to find the namespace") + } + + if ns.Settings.ConnectionAnnouncement != "" { + return errors.New("unable to unapply the migration") + } + + return nil + }, + }, + } + + for _, test := range cases { + tc := test + t.Run(tc.description, func(t *testing.T) { + tc.requireMocks() + + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + assert.NoError(t, tc.setup()) + assert.NoError(t, tc.test()) + + envMock.AssertExpectations(t) + }) + } +} diff --git a/api/store/mongo/migrations/migration_75.go b/api/store/mongo/migrations/migration_75.go new file mode 100644 index 00000000000..9e4aec7032c --- /dev/null +++ b/api/store/mongo/migrations/migration_75.go @@ -0,0 +1,126 @@ +package migrations + +import ( + "context" + + "github.com/shellhub-io/shellhub/pkg/models" + log "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration75 = migrate.Migration{ + Version: 75, + Description: "Convert user.confirmed to user.status", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{ + "component": "migration", + "version": 75, + "action": "Up", + }).Info("Applying migration") + + pipeline := []bson.M{ + { + "$match": bson.M{ + "confirmed": bson.M{ + 
"$exists": true, + }, + "status": bson.M{ + "$exists": false, + }, + }, + }, + } + + cursor, err := db.Collection("users").Aggregate(ctx, pipeline) + if err != nil { + return err + } + defer cursor.Close(ctx) + + updateModels := make([]mongo.WriteModel, 0) + + for cursor.Next(ctx) { + user := make(map[string]interface{}) + if err := cursor.Decode(&user); err != nil { + return err + } + + updateModel := mongo. + NewUpdateOneModel(). + SetFilter(bson.M{"_id": user["_id"]}) + + if confirmed := user["confirmed"]; confirmed == true { + updateModel.SetUpdate(bson.M{"$set": bson.M{"status": models.UserStatusConfirmed.String()}, "$unset": bson.M{"confirmed": ""}}) + } else { + updateModel.SetUpdate(bson.M{"$set": bson.M{"status": models.UserStatusNotConfirmed.String()}, "$unset": bson.M{"confirmed": ""}}) + } + + updateModels = append(updateModels, updateModel) + } + + if len(updateModels) == 0 { + return nil + } + + _, err = db.Collection("users").BulkWrite(ctx, updateModels) + + return err + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{ + "component": "migration", + "version": 75, + "action": "Down", + }).Info("Reverting migration") + + pipeline := []bson.M{ + { + "$match": bson.M{ + "confirmed": bson.M{ + "$exists": false, + }, + "status": bson.M{ + "$exists": true, + }, + }, + }, + } + + cursor, err := db.Collection("users").Aggregate(ctx, pipeline) + if err != nil { + return err + } + defer cursor.Close(ctx) + + updateModels := make([]mongo.WriteModel, 0) + + for cursor.Next(ctx) { + user := make(map[string]interface{}) + if err := cursor.Decode(&user); err != nil { + return err + } + + updateModel := mongo. + NewUpdateOneModel(). 
+ SetFilter(bson.M{"_id": user["_id"]}) + + if status := user["status"].(string); status == models.UserStatusConfirmed.String() { + updateModel.SetUpdate(bson.M{"$set": bson.M{"confirmed": true}, "$unset": bson.M{"status": ""}}) + } else { + updateModel.SetUpdate(bson.M{"$set": bson.M{"confirmed": false}, "$unset": bson.M{"status": ""}}) + } + + updateModels = append(updateModels, updateModel) + } + + if len(updateModels) == 0 { + return nil + } + + _, err = db.Collection("users").BulkWrite(ctx, updateModels) + + return err + }), +} diff --git a/api/store/mongo/migrations/migration_75_test.go b/api/store/mongo/migrations/migration_75_test.go new file mode 100644 index 00000000000..f35747b7616 --- /dev/null +++ b/api/store/mongo/migrations/migration_75_test.go @@ -0,0 +1,162 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/shellhub-io/shellhub/pkg/envs" + envmock "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +func TestMigration75Up(t *testing.T) { + ctx := context.Background() + + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func(primitive.ObjectID) error + expected string + }{ + { + description: "Success to apply up on migration 75 with confirmed == true", + setup: func(objID primitive.ObjectID) error { + _, err := c. + Database("test"). + Collection("users"). + InsertOne(ctx, map[string]interface{}{ + "_id": objID, + "confirmed": true, + }) + + return err + }, + expected: models.UserStatusConfirmed.String(), + }, + { + description: "Success to apply up on migration 75 with confirmed == false", + setup: func(objID primitive.ObjectID) error { + _, err := c. + Database("test"). + Collection("users"). 
+ InsertOne(ctx, map[string]interface{}{ + "_id": objID, + "confirmed": false, + }) + + return err + }, + expected: models.UserStatusNotConfirmed.String(), + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + + objID := primitive.NewObjectID() + assert.NoError(tt, tc.setup(objID)) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[74]) + require.NoError(tt, migrates.Up(context.Background(), migrate.AllAvailable)) + + query := c. + Database("test"). + Collection("users"). + FindOne(context.TODO(), bson.M{"_id": objID}) + + user := make(map[string]interface{}) + require.NoError(tt, query.Decode(&user)) + + _, ok := user["confirmed"] + require.Equal(tt, false, ok) + + status, ok := user["status"].(string) + require.Equal(tt, true, ok) + require.Equal(tt, tc.expected, status) + }) + } +} + +func TestMigration75Down(t *testing.T) { + ctx := context.Background() + + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func(primitive.ObjectID) error + expected bool + }{ + { + description: "Success to apply up on migration 75 with status confirmed", + setup: func(objID primitive.ObjectID) error { + _, err := c. + Database("test"). + Collection("users"). + InsertOne(ctx, map[string]interface{}{ + "_id": objID, + "status": models.UserStatusConfirmed.String(), + }) + + return err + }, + expected: true, + }, + { + description: "Success to apply up on migration 75 with status unconfirmed", + setup: func(objID primitive.ObjectID) error { + _, err := c. + Database("test"). + Collection("users"). 
+ InsertOne(ctx, map[string]interface{}{ + "_id": objID, + "status": models.UserStatusNotConfirmed.String(), + }) + + return err + }, + expected: false, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + + objID := primitive.NewObjectID() + assert.NoError(tt, tc.setup(objID)) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[74]) + require.NoError(tt, migrates.Up(context.Background(), migrate.AllAvailable)) + require.NoError(tt, migrates.Down(context.Background(), migrate.AllAvailable)) + + query := c. + Database("test"). + Collection("users"). + FindOne(context.TODO(), bson.M{"_id": objID}) + + user := make(map[string]interface{}) + require.NoError(tt, query.Decode(&user)) + + _, ok := user["status"] + require.Equal(tt, false, ok) + + confirmed, ok := user["confirmed"].(bool) + require.Equal(tt, true, ok) + require.Equal(tt, tc.expected, confirmed) + }) + } +} diff --git a/api/store/mongo/migrations/migration_76.go b/api/store/mongo/migrations/migration_76.go new file mode 100644 index 00000000000..6254185676f --- /dev/null +++ b/api/store/mongo/migrations/migration_76.go @@ -0,0 +1,87 @@ +package migrations + +import ( + "context" + + "github.com/shellhub-io/shellhub/pkg/models" + log "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration76 = migrate.Migration{ + Version: 76, + Description: "Remove user.namespaces from users collection.", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithField("component", "migration"). + WithField("version", 76). + WithField("action", "Up"). 
+ Info("Applying migration") + + filter := bson.M{"namespaces": bson.M{"$exists": true}} + update := bson.M{"$unset": bson.M{"namespaces": ""}} + + _, err := db.Collection("users").UpdateMany(ctx, filter, update) + + return err + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithField("component", "migration"). + WithField("version", 76). + WithField("action", "Down"). + Info("Reverting migration") + + filter := []bson.M{ + { + "$match": bson.M{ + "namespaces": bson.M{ + "$exists": false, + }, + }, + }, + } + + cursor, err := db.Collection("users").Aggregate(ctx, filter) + if err != nil { + return err + } + defer cursor.Close(ctx) + + updateModels := make([]mongo.WriteModel, 0) + for cursor.Next(ctx) { + user := new(models.User) + if err := cursor.Decode(user); err != nil { + return err + } + + cursor, err := db.Collection("namespaces").Find(ctx, bson.M{"members": bson.M{"$elemMatch": bson.M{"id": user.ID, "role": "owner"}}}) + if err != nil { + return err + } + defer cursor.Close(ctx) + + namespaces := make([]models.Namespace, 0) + if err := cursor.All(ctx, &namespaces); err != nil { + continue + } + + userID, _ := primitive.ObjectIDFromHex(user.ID) + updateModel := mongo. + NewUpdateOneModel(). + SetFilter(bson.M{"_id": userID}). 
+ SetUpdate(bson.M{"$set": bson.M{"namespaces": len(namespaces)}}) + + updateModels = append(updateModels, updateModel) + } + + if len(updateModels) == 0 { + return nil + } + + _, err = db.Collection("users").BulkWrite(ctx, updateModels) + + return err + }), +} diff --git a/api/store/mongo/migrations/migration_76_test.go b/api/store/mongo/migrations/migration_76_test.go new file mode 100644 index 00000000000..b8c7232e09b --- /dev/null +++ b/api/store/mongo/migrations/migration_76_test.go @@ -0,0 +1,151 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/shellhub-io/shellhub/pkg/envs" + envmock "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +func TestMigration76Up(t *testing.T) { + ctx := context.Background() + + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func(primitive.ObjectID) error + }{ + { + description: "Success to apply up on migration 76", + setup: func(objID primitive.ObjectID) error { + _, err := c. + Database("test"). + Collection("users"). + InsertOne(ctx, map[string]interface{}{ + "_id": objID, + "namespaces": 1, + }) + + return err + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + + objID := primitive.NewObjectID() + assert.NoError(tt, tc.setup(objID)) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[75]) + require.NoError(tt, migrates.Up(context.Background(), migrate.AllAvailable)) + + query := c. + Database("test"). + Collection("users"). 
+ FindOne(context.TODO(), bson.M{"_id": objID}) + + user := make(map[string]interface{}) + require.NoError(tt, query.Decode(&user)) + + _, ok := user["namespaces"] + require.Equal(tt, false, ok) + }) + } +} + +func TestMigration76Down(t *testing.T) { + db := c.Database("test") + ctx := context.Background() + + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func(primitive.ObjectID) error + expected int32 + }{ + { + description: "Success to apply up on migration 76", + setup: func(objID primitive.ObjectID) error { + _, err := db.Collection("users").InsertOne(ctx, map[string]interface{}{"_id": objID}) + if err != nil { + return err + } + + namespaces := []map[string]interface{}{ + { + "_id": primitive.NewObjectID(), + "members": []bson.M{ + {"id": objID.Hex(), "role": "owner"}, + {"id": "000000000000000000000000", "role": "observer"}, + }, + }, + { + "_id": primitive.NewObjectID(), + "members": []bson.M{ + {"id": objID.Hex(), "role": "owner"}, + {"id": "000000000000000000000000", "role": "observer"}, + }, + }, + { + "_id": primitive.NewObjectID(), + "members": []bson.M{ + {"id": objID.Hex(), "role": "observer"}, + {"id": "000000000000000000000000", "role": "owner"}, + }, + }, + } + + for _, n := range namespaces { + _, err = db.Collection("namespaces").InsertOne(ctx, n) + if err != nil { + return err + } + } + + return nil + }, + expected: 2, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + + objID := primitive.NewObjectID() + assert.NoError(tt, tc.setup(objID)) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[75]) + require.NoError(tt, migrates.Up(context.Background(), migrate.AllAvailable)) + require.NoError(tt, migrates.Down(context.Background(), migrate.AllAvailable)) + + query := c. + Database("test"). + Collection("users"). 
+ FindOne(context.TODO(), bson.M{"_id": objID}) + + user := make(map[string]interface{}) + require.NoError(tt, query.Decode(&user)) + + count, ok := user["namespaces"] + require.Equal(tt, true, ok) + require.Equal(tt, tc.expected, count.(int32)) + }) + } +} diff --git a/api/store/mongo/migrations/migration_77.go b/api/store/mongo/migrations/migration_77.go new file mode 100644 index 00000000000..88c88216a49 --- /dev/null +++ b/api/store/mongo/migrations/migration_77.go @@ -0,0 +1,43 @@ +package migrations + +import ( + "context" + + log "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +var migration77 = migrate.Migration{ + Version: 77, + Description: "Recreate the unique index on the 'username' field in the 'users' collection with a partial filter for documents where the 'username' field is a string.", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithField("component", "migration"). + WithField("version", 77). + WithField("action", " Up"). + Info("Applying migration") + + _, _ = db.Collection("users").Indexes().DropOne(ctx, "username") + + indexModel := mongo.IndexModel{ + Keys: bson.M{"username": 1}, + Options: options.Index().SetName("username").SetUnique(true).SetPartialFilterExpression(bson.M{"username": bson.M{"$type": "string"}}), + } + + _, err := db.Collection("users").Indexes().CreateOne(ctx, indexModel) + + return err + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithField("component", "migration"). + WithField("version", 77). + WithField("action", "Down"). 
+ Info("Reverting migration") + + _, err := db.Collection("users").Indexes().DropOne(ctx, "username") + + return err + }), +} diff --git a/api/store/mongo/migrations/migration_77_test.go b/api/store/mongo/migrations/migration_77_test.go new file mode 100644 index 00000000000..1c77b9456c5 --- /dev/null +++ b/api/store/mongo/migrations/migration_77_test.go @@ -0,0 +1,59 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/shellhub-io/shellhub/pkg/envs" + envmock "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" +) + +func TestMigration77Up(t *testing.T) { + ctx := context.Background() + + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + }{ + { + description: "Success to apply up on migration 77", + setup: func() error { + _, err := c. + Database("test"). + Collection("users"). + InsertOne(ctx, map[string]interface{}{ + "username": nil, + }) + + return err + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + + assert.NoError(tt, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[76]) + require.NoError(tt, migrates.Up(context.Background(), migrate.AllAvailable)) + + _, err := c. + Database("test"). + Collection("users"). 
+ InsertOne(ctx, map[string]interface{}{ + "username": nil, + }) + require.NoError(tt, err) + }) + } +} diff --git a/api/store/mongo/migrations/migration_78.go b/api/store/mongo/migrations/migration_78.go new file mode 100644 index 00000000000..5a4c848272b --- /dev/null +++ b/api/store/mongo/migrations/migration_78.go @@ -0,0 +1,43 @@ +package migrations + +import ( + "context" + + log "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +var migration78 = migrate.Migration{ + Version: 78, + Description: "Recreate the unique index on the 'email' field in the 'users' collection with a partial filter for documents where the 'email' field is a string.", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithField("component", "migration"). + WithField("version", 78). + WithField("action", " Up"). + Info("Applying migration") + + _, _ = db.Collection("users").Indexes().DropOne(ctx, "email") + + indexModel := mongo.IndexModel{ + Keys: bson.M{"email": 1}, + Options: options.Index().SetName("email").SetUnique(true).SetPartialFilterExpression(bson.M{"email": bson.M{"$type": "string"}}), + } + + _, err := db.Collection("users").Indexes().CreateOne(ctx, indexModel) + + return err + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithField("component", "migration"). + WithField("version", 78). + WithField("action", "Down"). 
+ Info("Reverting migration") + + _, err := db.Collection("users").Indexes().DropOne(ctx, "email") + + return err + }), +} diff --git a/api/store/mongo/migrations/migration_78_test.go b/api/store/mongo/migrations/migration_78_test.go new file mode 100644 index 00000000000..84e6ca8edba --- /dev/null +++ b/api/store/mongo/migrations/migration_78_test.go @@ -0,0 +1,59 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/shellhub-io/shellhub/pkg/envs" + envmock "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" +) + +func TestMigration78Up(t *testing.T) { + ctx := context.Background() + + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + }{ + { + description: "Success to apply up on migration 78", + setup: func() error { + _, err := c. + Database("test"). + Collection("users"). + InsertOne(ctx, map[string]interface{}{ + "email": nil, + }) + + return err + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + + assert.NoError(tt, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[77]) + require.NoError(tt, migrates.Up(context.Background(), migrate.AllAvailable)) + + _, err := c. + Database("test"). + Collection("users"). 
+ InsertOne(ctx, map[string]interface{}{ + "email": nil, + }) + require.NoError(tt, err) + }) + } +} diff --git a/api/store/mongo/migrations/migration_79.go b/api/store/mongo/migrations/migration_79.go new file mode 100644 index 00000000000..9f88e04738e --- /dev/null +++ b/api/store/mongo/migrations/migration_79.go @@ -0,0 +1,49 @@ +package migrations + +import ( + "context" + + "github.com/shellhub-io/shellhub/pkg/envs" + log "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration79 = migrate.Migration{ + Version: 79, + Description: "create and populate the system collection", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithField("component", "migration"). + WithField("version", 79). + WithField("action", " Up"). + Info("Applying migration") + + if err := db.CreateCollection(ctx, "system"); err != nil { + return err + } + + if envs.IsCommunity() { + users, err := db.Collection("users").CountDocuments(ctx, bson.M{}) + if err != nil { + return err + } + + if _, err := db.Collection("system").InsertOne(ctx, bson.M{ + "setup": users > 0, + }); err != nil { + return err + } + } + + return nil + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithField("component", "migration"). + WithField("version", 79). + WithField("action", "Down"). 
+ Info("Reverting migration") + + return db.Collection("system").Drop(ctx) + }), +} diff --git a/api/store/mongo/migrations/migration_79_test.go b/api/store/mongo/migrations/migration_79_test.go new file mode 100644 index 00000000000..6d541e7ff23 --- /dev/null +++ b/api/store/mongo/migrations/migration_79_test.go @@ -0,0 +1,79 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/shellhub-io/shellhub/pkg/envs" + envmock "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration79(t *testing.T) { + ctx := context.Background() + + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + mock.On("Get", "SHELLHUB_CLOUD").Return("false") + mock.On("Get", "SHELLHUB_ENTERPRISE").Return("false") + + tests := []struct { + description string + setup func(t *testing.T) + run func(t *testing.T) + }{ + { + description: "Apply up on migration 79 when there is no user", + setup: func(_ *testing.T) {}, + run: func(t *testing.T) { + result := c.Database("test").Collection("system").FindOne(ctx, bson.M{}) + require.NoError(t, result.Err()) + + var system models.System + + err := result.Decode(&system) + require.NoError(t, err) + + assert.Equal(t, false, system.Setup) + }, + }, + { + description: "Apply up on migration 79 when there is at least one user", + setup: func(t *testing.T) { + _, err := c.Database("test").Collection("users").InsertOne(ctx, models.User{}) + require.NoError(t, err) + }, + run: func(t *testing.T) { + result := c.Database("test").Collection("system").FindOne(ctx, bson.M{}) + require.NoError(t, result.Err()) + + var system models.System + + err := result.Decode(&system) + require.NoError(t, err) + + assert.Equal(t, true, system.Setup) + }, + }, + } + + for _, test := range tests { + t.Run(test.description, func(tt 
*testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + + test.setup(tt) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[78]) + require.NoError(tt, migrates.Up(context.Background(), migrate.AllAvailable)) + + test.run(tt) + }) + } +} diff --git a/api/store/mongo/migrations/migration_7_test.go b/api/store/mongo/migrations/migration_7_test.go index e1433acb4b4..6a837cd6e9b 100644 --- a/api/store/mongo/migrations/migration_7_test.go +++ b/api/store/mongo/migrations/migration_7_test.go @@ -4,21 +4,18 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" ) func TestMigration7(t *testing.T) { - logrus.Info("Testing Migration 7 - Test if the uid and message is not unique") + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) - db := dbtest.DBServer{} - defer db.Stop() - - migrates := migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:6]...) - err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:6]...) 
+ err := migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) recordedSession1 := models.RecordedSession{ @@ -31,13 +28,13 @@ func TestMigration7(t *testing.T) { Message: "message", } - _, err = db.Client().Database("test").Collection("recorded_sessions").InsertOne(context.TODO(), recordedSession1) + _, err = c.Database("test").Collection("recorded_sessions").InsertOne(context.TODO(), recordedSession1) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("recorded_sessions").InsertOne(context.TODO(), recordedSession2) + _, err = c.Database("test").Collection("recorded_sessions").InsertOne(context.TODO(), recordedSession2) assert.NoError(t, err) - migrates = migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:7]...) - err = migrates.Up(migrate.AllAvailable) + migrates = migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:7]...) + err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) } diff --git a/api/store/mongo/migrations/migration_8.go b/api/store/mongo/migrations/migration_8.go index b96f04c34d2..2d9b7bdb96b 100644 --- a/api/store/mongo/migrations/migration_8.go +++ b/api/store/mongo/migrations/migration_8.go @@ -13,7 +13,7 @@ import ( var migration8 = migrate.Migration{ Version: 8, Description: "Unset unique on recorded in the sessions collection", - Up: func(db *mongo.Database) error { + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 8, @@ -23,24 +23,24 @@ var migration8 = migrate.Migration{ Keys: bson.D{{"recorded", 1}}, Options: options.Index().SetName("recorded").SetUnique(false), } - if _, err := db.Collection("sessions").Indexes().CreateOne(context.TODO(), mod); err != nil { + if _, err := db.Collection("sessions").Indexes().CreateOne(ctx, mod); err != nil { return err } - _, err := db.Collection("sessions").UpdateMany(context.TODO(), bson.M{}, 
bson.M{"$set": bson.M{"recorded": false}}) + _, err := db.Collection("sessions").UpdateMany(ctx, bson.M{}, bson.M{"$set": bson.M{"recorded": false}}) return err - }, - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 8, "action": "Down", }).Info("Applying migration") - if _, err := db.Collection("sessions").UpdateMany(context.TODO(), bson.M{}, bson.M{"$unset": bson.M{"recorded": ""}}); err != nil { + if _, err := db.Collection("sessions").UpdateMany(ctx, bson.M{}, bson.M{"$unset": bson.M{"recorded": ""}}); err != nil { return err } - _, err := db.Collection("sessions").Indexes().DropOne(context.TODO(), "recorded") + _, err := db.Collection("sessions").Indexes().DropOne(ctx, "recorded") return err - }, + }), } diff --git a/api/store/mongo/migrations/migration_80.go b/api/store/mongo/migrations/migration_80.go new file mode 100644 index 00000000000..40b8d5bbb2f --- /dev/null +++ b/api/store/mongo/migrations/migration_80.go @@ -0,0 +1,41 @@ +package migrations + +import ( + "context" + + log "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +var migration80 = migrate.Migration{ + Version: 80, + Description: "Remove the 'message' index from the 'recorded_sessions' collection.", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithField("component", "migration"). + WithField("version", 80). + WithField("action", " Up"). + Info("Applying migration") + + _, err := db.Collection("recorded_sessions").Indexes().DropOne(ctx, "message") + + return err + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithField("component", "migration"). + WithField("version", 80). + WithField("action", "Down"). 
+ Info("Applying migration") + + index := mongo.IndexModel{ + Keys: bson.D{{Key: "message", Value: 1}}, + Options: options.Index().SetName("message").SetUnique(false), + } + + _, err := db.Collection("recorded_sessions").Indexes().CreateOne(ctx, index) + + return err + }), +} diff --git a/api/store/mongo/migrations/migration_81.go b/api/store/mongo/migrations/migration_81.go new file mode 100644 index 00000000000..0444a6e748b --- /dev/null +++ b/api/store/mongo/migrations/migration_81.go @@ -0,0 +1,41 @@ +package migrations + +import ( + "context" + + log "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +var migration81 = migrate.Migration{ + Version: 81, + Description: "Create a 'time' index in the 'recorded_sessions' collection.", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithField("component", "migration"). + WithField("version", 81). + WithField("action", " Up"). + Info("Applying migration") + + index := mongo.IndexModel{ + Keys: bson.D{{Key: "time", Value: 1}}, + Options: options.Index().SetName("time").SetUnique(false), + } + + _, err := db.Collection("recorded_sessions").Indexes().CreateOne(ctx, index) + + return err + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithField("component", "migration"). + WithField("version", 81). + WithField("action", "Down"). 
+ Info("Applying migration") + + _, err := db.Collection("recorded_sessions").Indexes().DropOne(ctx, "time") + + return err + }), +} diff --git a/api/store/mongo/migrations/migration_82.go b/api/store/mongo/migrations/migration_82.go new file mode 100644 index 00000000000..d841e06af9f --- /dev/null +++ b/api/store/mongo/migrations/migration_82.go @@ -0,0 +1,68 @@ +package migrations + +import ( + "context" + + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" + "go.mongodb.org/mongo-driver/mongo/writeconcern" +) + +var migration82 = migrate.Migration{ + Version: 82, + Description: "Adding the 'namespaces.type' attribute to the namespaces if it does not already exist.", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 82, + "action": "Up", + }).Info("Applying migration") + + filter := bson.M{ + "type": bson.M{"$in": []interface{}{nil, ""}}, + } + + update := bson.M{ + "$set": bson.M{ + "type": models.TypeTeam, + }, + } + + _, err := db. + Collection("namespaces", + options.Collection().SetWriteConcern(writeconcern.Majority()), + ). + UpdateMany(ctx, filter, update) + + return err + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 82, + "action": "Down", + }).Info("Reverting migration") + + filter := bson.M{ + "type": bson.M{"$in": []interface{}{nil, ""}}, + } + + update := bson.M{ + "$unset": bson.M{ + "type": models.TypeTeam, + }, + } + + _, err := db. + Collection("namespaces", + options.Collection().SetWriteConcern(writeconcern.Majority()), + ). 
+ UpdateMany(ctx, filter, update) + + return err + }), +} diff --git a/api/store/mongo/migrations/migration_82_test.go b/api/store/mongo/migrations/migration_82_test.go new file mode 100644 index 00000000000..e0b1dbed382 --- /dev/null +++ b/api/store/mongo/migrations/migration_82_test.go @@ -0,0 +1,78 @@ +package migrations + +import ( + "context" + "errors" + "testing" + + "github.com/shellhub-io/shellhub/pkg/envs" + envMocks "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/stretchr/testify/assert" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration82(t *testing.T) { + ctx := context.Background() + + mock := &envMocks.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + test func() error + }{ + { + description: "Success to apply up on migration 82", + setup: func() error { + _, err := c. + Database("test"). + Collection("namespaces"). + InsertOne(ctx, models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Settings: &models.NamespaceSettings{}, + }) + + return err + }, + test: func() error { + migrations := GenerateMigrations()[81:82] + migrates := migrate.NewMigrate(c.Database("test"), migrations...) + err := migrates.Up(context.Background(), migrate.AllAvailable) + if err != nil { + return err + } + + query := c. + Database("test"). + Collection("namespaces"). 
+ FindOne(context.TODO(), bson.M{"tenant_id": "00000000-0000-4000-0000-000000000000"}) + + ns := new(models.Namespace) + if err := query.Decode(ns); err != nil { + return errors.New("unable to find the namespace") + } + + if ns.Type != models.TypeTeam { + return errors.New("unable to apply the migration") + } + + return nil + }, + }, + } + + for _, test := range cases { + tc := test + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + assert.NoError(t, tc.setup()) + assert.NoError(t, tc.test()) + }) + } +} diff --git a/api/store/mongo/migrations/migration_83.go b/api/store/mongo/migrations/migration_83.go new file mode 100644 index 00000000000..40ffe1f6e83 --- /dev/null +++ b/api/store/mongo/migrations/migration_83.go @@ -0,0 +1,58 @@ +package migrations + +import ( + "context" + + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration83 = migrate.Migration{ + Version: 83, + Description: "Set the user's 'origin' attribute to 'manual' if it does not already exist.", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 83, + "action": "Up", + }).Info("Applying migration") + + filter := bson.M{ + "origin": bson.M{"$exists": false}, + } + + update := bson.M{ + "$set": bson.M{ + "origin": models.UserOriginLocal.String(), + }, + } + + _, err := db.Collection("users").UpdateMany(ctx, filter, update) + + return err + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 83, + "action": "Down", + }).Info("Reverting migration") + + filter := bson.M{ + "origin": bson.M{"$exists": true}, + } + + update := bson.M{ + "$unset": bson.M{ + "origin": "", + }, 
+ } + + _, err := db.Collection("users").UpdateMany(ctx, filter, update) + + return err + }), +} diff --git a/api/store/mongo/migrations/migration_83_test.go b/api/store/mongo/migrations/migration_83_test.go new file mode 100644 index 00000000000..528efd2b83b --- /dev/null +++ b/api/store/mongo/migrations/migration_83_test.go @@ -0,0 +1,119 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/shellhub-io/shellhub/pkg/envs" + envmock "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration83Up(t *testing.T) { + ctx := context.Background() + + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + test func() error + }{ + { + description: "Success to apply up on migration 83", + setup: func() error { + _, err := c. + Database("test"). + Collection("users"). + InsertOne(ctx, map[string]interface{}{ + "name": "john doe", + }) + + return err + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + + assert.NoError(tt, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[82]) + require.NoError(tt, migrates.Up(context.Background(), migrate.AllAvailable)) + + query := c. + Database("test"). + Collection("users"). 
+ FindOne(context.TODO(), bson.M{"name": "john doe"}) + + user := make(map[string]interface{}) + require.NoError(tt, query.Decode(&user)) + + v, ok := user["origin"] + require.Equal(tt, true, ok) + require.Equal(tt, v, models.UserOriginLocal.String()) + }) + } +} + +func TestMigration83Down(t *testing.T) { + ctx := context.Background() + + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + test func() error + }{ + { + description: "Success to apply up on migration 83", + setup: func() error { + _, err := c. + Database("test"). + Collection("users"). + InsertOne(ctx, map[string]interface{}{ + "name": "john doe", + "origin": models.UserOriginLocal.String(), + }) + + return err + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + assert.NoError(t, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[82]) + require.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) + require.NoError(t, migrates.Down(context.Background(), migrate.AllAvailable)) + + query := c. + Database("test"). + Collection("users"). 
+ FindOne(context.TODO(), bson.M{"name": "john doe"}) + + user := make(map[string]interface{}) + require.NoError(t, query.Decode(&user)) + + _, ok := user["origin"] + require.Equal(t, false, ok) + }) + } +} diff --git a/api/store/mongo/migrations/migration_84.go b/api/store/mongo/migrations/migration_84.go new file mode 100644 index 00000000000..b9d11306663 --- /dev/null +++ b/api/store/mongo/migrations/migration_84.go @@ -0,0 +1,48 @@ +package migrations + +import ( + "context" + + "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +var migration84 = migrate.Migration{ + Version: 84, + Description: "create index for sessions' type", + Up: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 84, + "action": "Up", + }).Info("Applying migration up") + name := "events.types" + if _, err := db.Collection("sessions").Indexes().CreateOne(context.Background(), mongo.IndexModel{ + Keys: bson.M{ + "events.types": 1, + }, + Options: &options.IndexOptions{ //nolint:exhaustruct + Name: &name, + }, + }); err != nil { + return err + } + + return nil + }), + Down: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 84, + "action": "Down", + }).Info("Applying migration down") + if _, err := db.Collection("sessions").Indexes().DropOne(context.Background(), "events.types"); err != nil { + return err + } + + return nil + }), +} diff --git a/api/store/mongo/migrations/migration_84_test.go b/api/store/mongo/migrations/migration_84_test.go new file mode 100644 index 00000000000..76e84b5fb32 --- /dev/null +++ b/api/store/mongo/migrations/migration_84_test.go @@ -0,0 +1,99 @@ +package migrations + +import ( + "context" + "errors" + "testing" + + 
"github.com/stretchr/testify/assert" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration84(t *testing.T) { + cases := []struct { + description string + test func() error + }{ + { + "Success to apply up on migration 84", + func() error { + migrations := GenerateMigrations()[83:84] + migrates := migrate.NewMigrate(c.Database("test"), migrations...) + err := migrates.Up(context.Background(), migrate.AllAvailable) + if err != nil { + return err + } + + cursor, err := c.Database("test").Collection("sessions").Indexes().List(context.Background()) + if err != nil { + return err + } + + var found bool + for cursor.Next(context.Background()) { + var index bson.M + if err := cursor.Decode(&index); err != nil { + return err + } + + if index["name"] == "events.types" { + found = true + } + } + + if !found { + return errors.New("index not created") + } + + return nil + }, + }, + { + "Success to apply down on migration 84", + func() error { + migrations := GenerateMigrations()[83:84] + migrates := migrate.NewMigrate(c.Database("test"), migrations...) 
+ err := migrates.Down(context.Background(), migrate.AllAvailable) + if err != nil { + return err + } + + cursor, err := c.Database("test").Collection("sessions").Indexes().List(context.Background()) + if err != nil { + return errors.New("index not dropped") + } + + var found bool + for cursor.Next(context.Background()) { + var index bson.M + if err := cursor.Decode(&index); err != nil { + return err + } + + if index["name"] == "events.types" { + found = true + } + } + + if found { + return errors.New("index not dropped") + } + + return nil + }, + }, + } + + for _, test := range cases { + tc := test + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + err := tc.test() + assert.NoError(t, err) + }) + } +} diff --git a/api/store/mongo/migrations/migration_85.go b/api/store/mongo/migrations/migration_85.go new file mode 100644 index 00000000000..c3a7609e283 --- /dev/null +++ b/api/store/mongo/migrations/migration_85.go @@ -0,0 +1,48 @@ +package migrations + +import ( + "context" + + "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +var migration85 = migrate.Migration{ + Version: 85, + Description: "create index for tunnels address", + Up: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 85, + "action": "Up", + }).Info("Applying migration up") + name := "address" + if _, err := db.Collection("tunnels").Indexes().CreateOne(context.Background(), mongo.IndexModel{ + Keys: bson.M{ + "address": 1, + }, + Options: &options.IndexOptions{ //nolint:exhaustruct + Name: &name, + }, + }); err != nil { + return err + } + + return nil + }), + Down: migrate.MigrationFunc(func(_ context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": 
"migration", + "version": 85, + "action": "Down", + }).Info("Applying migration down") + if _, err := db.Collection("tunnels").Indexes().DropOne(context.Background(), "address"); err != nil { + return err + } + + return nil + }), +} diff --git a/api/store/mongo/migrations/migration_85_test.go b/api/store/mongo/migrations/migration_85_test.go new file mode 100644 index 00000000000..b9e3790d326 --- /dev/null +++ b/api/store/mongo/migrations/migration_85_test.go @@ -0,0 +1,99 @@ +package migrations + +import ( + "context" + "errors" + "testing" + + "github.com/stretchr/testify/assert" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration85(t *testing.T) { + cases := []struct { + description string + test func() error + }{ + { + "Success to apply up on migration 85", + func() error { + migrations := GenerateMigrations()[84:85] + migrates := migrate.NewMigrate(c.Database("test"), migrations...) + err := migrates.Up(context.Background(), migrate.AllAvailable) + if err != nil { + return err + } + + cursor, err := c.Database("test").Collection("tunnels").Indexes().List(context.Background()) + if err != nil { + return err + } + + var found bool + for cursor.Next(context.Background()) { + var index bson.M + if err := cursor.Decode(&index); err != nil { + return err + } + + if index["name"] == "address" { + found = true + } + } + + if !found { + return errors.New("index not created") + } + + return nil + }, + }, + { + "Success to apply down on migration 85", + func() error { + migrations := GenerateMigrations()[83:85] + migrates := migrate.NewMigrate(c.Database("test"), migrations...) 
+ err := migrates.Down(context.Background(), migrate.AllAvailable) + if err != nil { + return err + } + + cursor, err := c.Database("test").Collection("tunnels").Indexes().List(context.Background()) + if err != nil { + return errors.New("index not dropped") + } + + var found bool + for cursor.Next(context.Background()) { + var index bson.M + if err := cursor.Decode(&index); err != nil { + return err + } + + if index["name"] == "address" { + found = true + } + } + + if found { + return errors.New("index not dropped") + } + + return nil + }, + }, + } + + for _, test := range cases { + tc := test + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + err := tc.test() + assert.NoError(t, err) + }) + } +} diff --git a/api/store/mongo/migrations/migration_86.go b/api/store/mongo/migrations/migration_86.go new file mode 100644 index 00000000000..5eb3764215b --- /dev/null +++ b/api/store/mongo/migrations/migration_86.go @@ -0,0 +1,62 @@ +package migrations + +import ( + "context" + + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration86 = migrate.Migration{ + Version: 86, + Description: "Adding an 'auth_methods' attributes to user collection", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 86, + "action": "Up", + }).Info("Applying migration") + + filter := bson.M{ + "preferences.auth_methods": bson.M{"$exists": false}, + } + + update := bson.M{ + "$set": bson.M{ + "preferences.auth_methods": []string{models.UserAuthMethodLocal.String()}, + }, + } + + _, err := db. + Collection("users"). 
+ UpdateMany(ctx, filter, update) + + return err + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 86, + "action": "Down", + }).Info("Reverting migration") + + filter := bson.M{ + "preferences.auth_methods": bson.M{"$exists": true}, + } + + update := bson.M{ + "$unset": bson.M{ + "preferences.auth_methods": "", + }, + } + + _, err := db. + Collection("users"). + UpdateMany(ctx, filter, update) + + return err + }), +} diff --git a/api/store/mongo/migrations/migration_86_test.go b/api/store/mongo/migrations/migration_86_test.go new file mode 100644 index 00000000000..b332186d607 --- /dev/null +++ b/api/store/mongo/migrations/migration_86_test.go @@ -0,0 +1,122 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/shellhub-io/shellhub/pkg/envs" + envmock "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +func TestMigration86Up(t *testing.T) { + ctx := context.Background() + + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + test func() error + }{ + { + description: "Success to apply up on migration 86", + setup: func() error { + _, err := c. + Database("test"). + Collection("users"). 
+ InsertOne(ctx, map[string]interface{}{ + "name": "john doe", + "preferences": map[string]string{}, + }) + + return err + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + + assert.NoError(tt, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[85]) + require.NoError(tt, migrates.Up(context.Background(), migrate.AllAvailable)) + + query := c. + Database("test"). + Collection("users"). + FindOne(context.TODO(), bson.M{"name": "john doe"}) + + user := make(map[string]interface{}) + require.NoError(tt, query.Decode(&user)) + + v, ok := user["preferences"].(map[string]interface{})["auth_methods"] + require.Equal(tt, true, ok) + require.Equal(tt, primitive.A{"local"}, v) + }) + } +} + +func TestMigration86Down(t *testing.T) { + ctx := context.Background() + + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + test func() error + }{ + { + description: "Success to apply down on migration 86", + setup: func() error { + _, err := c. + Database("test"). + Collection("users"). + InsertOne(ctx, map[string]interface{}{ + "name": "john doe", + "preferences": map[string]interface{}{ + "auth_methods": []string{"some_method"}, + }, + }) + + return err + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + assert.NoError(t, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[85]) + require.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) + require.NoError(t, migrates.Down(context.Background(), migrate.AllAvailable)) + + query := c. + Database("test"). + Collection("users"). 
+ FindOne(context.TODO(), bson.M{"name": "john doe"}) + + user := make(map[string]interface{}) + require.NoError(t, query.Decode(&user)) + + _, ok := user["preferences"].(map[string]interface{})["auth_methods"] + require.Equal(t, false, ok) + }) + } +} diff --git a/api/store/mongo/migrations/migration_87.go b/api/store/mongo/migrations/migration_87.go new file mode 100644 index 00000000000..e499b8207ee --- /dev/null +++ b/api/store/mongo/migrations/migration_87.go @@ -0,0 +1,71 @@ +package migrations + +import ( + "context" + + "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration87 = migrate.Migration{ + Version: 87, + Description: "Adding an 'authentication' attributes to system collection", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 87, + "action": "Up", + }).Info("Applying migration") + + if count, _ := db.Collection("system").CountDocuments(ctx, bson.M{}); count == 0 { + if _, err := db.Collection("system").InsertOne(ctx, bson.M{"setup": true}); err != nil { + return err + } + } + + filter := bson.M{ + "authentication": bson.M{"$exists": false}, + } + + update := bson.M{ + "$set": bson.M{ + "authentication": bson.M{ + "local": bson.M{ + "enabled": true, + }, + }, + }, + } + + _, err := db. + Collection("system"). + UpdateMany(ctx, filter, update) + + return err + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 87, + "action": "Down", + }).Info("Reverting migration") + + filter := bson.M{ + "authentication": bson.M{"$exists": true}, + } + + update := bson.M{ + "$unset": bson.M{ + "authentication": "", + }, + } + + _, err := db. + Collection("system"). 
+ UpdateMany(ctx, filter, update) + + return err + }), +} diff --git a/api/store/mongo/migrations/migration_87_test.go b/api/store/mongo/migrations/migration_87_test.go new file mode 100644 index 00000000000..baa31524081 --- /dev/null +++ b/api/store/mongo/migrations/migration_87_test.go @@ -0,0 +1,119 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/shellhub-io/shellhub/pkg/envs" + envmock "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration87Up(t *testing.T) { + ctx := context.Background() + + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + mock.On("Get", "SHELLHUB_CLOUD").Return("false") + mock.On("Get", "SHELLHUB_ENTERPRISE").Return("false") + + tests := []struct { + description string + setup func() error + }{ + { + description: "Apply up on migration 87", + setup: func() error { + _, err := c. + Database("test"). + Collection("system"). + InsertOne(ctx, map[string]interface{}{}) + + return err + }, + }, + } + + for _, tc := range tests { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + + assert.NoError(tt, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[86]) + require.NoError(tt, migrates.Up(context.Background(), migrate.AllAvailable)) + + query := c. + Database("test"). + Collection("system"). 
+ FindOne(context.TODO(), bson.M{}) + + system := make(map[string]interface{}) + require.NoError(tt, query.Decode(&system)) + + authentication, ok := system["authentication"].(map[string]interface{}) + require.Equal(tt, true, ok) + require.Equal(tt, map[string]interface{}{"local": map[string]interface{}{"enabled": true}}, authentication) + }) + } +} + +func TestMigration87Down(t *testing.T) { + ctx := context.Background() + + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + mock.On("Get", "SHELLHUB_CLOUD").Return("false") + mock.On("Get", "SHELLHUB_ENTERPRISE").Return("false") + + tests := []struct { + description string + setup func() error + }{ + { + description: "Apply up on migration 87", + setup: func() error { + _, err := c. + Database("test"). + Collection("system"). + InsertOne(ctx, map[string]interface{}{ + "authentication": "some_value", + }) + + return err + }, + }, + } + + for _, tc := range tests { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + + assert.NoError(tt, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[86]) + require.NoError(tt, migrates.Up(context.Background(), migrate.AllAvailable)) + require.NoError(tt, migrates.Down(context.Background(), migrate.AllAvailable)) + + query := c. + Database("test"). + Collection("system"). 
+ FindOne(context.TODO(), bson.M{}) + + system := make(map[string]interface{}) + require.NoError(tt, query.Decode(&system)) + + _, ok := system["authentication"] + require.Equal(tt, false, ok) + }) + } +} diff --git a/api/store/mongo/migrations/migration_88.go b/api/store/mongo/migrations/migration_88.go new file mode 100644 index 00000000000..055cf181098 --- /dev/null +++ b/api/store/mongo/migrations/migration_88.go @@ -0,0 +1,73 @@ +package migrations + +import ( + "context" + + "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration88 = migrate.Migration{ + Version: 88, + Description: "Adding an 'authentication.saml' attributes to system collection", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 88, + "action": "Up", + }).Info("Applying migration") + + filter := bson.M{ + "authentication.saml": bson.M{"$exists": false}, + } + + update := bson.M{ + "$set": bson.M{ + "authentication.saml": bson.M{ + "enabled": false, + "idp": bson.M{ + "entity_id": "", + "signon_url": "", + "certificates": []string{}, + }, + "sp": bson.M{ + "sign_auth_requests": false, + "certificate": "", + "private_key": "", + }, + }, + }, + } + + _, err := db. + Collection("system"). + UpdateMany(ctx, filter, update) + + return err + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 88, + "action": "Down", + }).Info("Reverting migration") + + filter := bson.M{ + "authentication.saml": bson.M{"$exists": true}, + } + + update := bson.M{ + "$unset": bson.M{ + "authentication.saml": "", + }, + } + + _, err := db. + Collection("system"). 
+ UpdateMany(ctx, filter, update) + + return err + }), +} diff --git a/api/store/mongo/migrations/migration_88_test.go b/api/store/mongo/migrations/migration_88_test.go new file mode 100644 index 00000000000..76ac5bd5e27 --- /dev/null +++ b/api/store/mongo/migrations/migration_88_test.go @@ -0,0 +1,136 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/shellhub-io/shellhub/pkg/envs" + envmock "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +func TestMigration88Up(t *testing.T) { + ctx := context.Background() + + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + tests := []struct { + description string + setup func() error + }{ + { + description: "Apply up on migration 88", + setup: func() error { + _, err := c. + Database("test"). + Collection("system"). + InsertOne(ctx, map[string]interface{}{ + "authentication": map[string]interface{}{ + "local": map[string]interface{}{ + "enabled": true, + }, + }, + }) + + return err + }, + }, + } + + for _, tc := range tests { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + + assert.NoError(tt, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[87]) + require.NoError(tt, migrates.Up(context.Background(), migrate.AllAvailable)) + + query := c. + Database("test"). + Collection("system"). 
+ FindOne(context.TODO(), bson.M{}) + + system := make(map[string]interface{}) + require.NoError(tt, query.Decode(&system)) + + saml, ok := system["authentication"].(map[string]interface{})["saml"].(map[string]interface{}) + require.Equal(tt, true, ok) + + enabled, ok := saml["enabled"] + require.Equal(tt, true, ok) + require.Equal(tt, false, enabled) + + idp, ok := saml["idp"].(map[string]interface{}) + require.Equal(tt, true, ok) + require.Equal(tt, map[string]interface{}{"entity_id": "", "signon_url": "", "certificates": primitive.A{}}, idp) + + sp, ok := saml["sp"].(map[string]interface{}) + require.Equal(tt, true, ok) + require.Equal(tt, map[string]interface{}{"sign_auth_requests": false, "certificate": "", "private_key": ""}, sp) + }) + } +} + +func TestMigration88Down(t *testing.T) { + ctx := context.Background() + + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + mock.On("Get", "SHELLHUB_CLOUD").Return("false") + mock.On("Get", "SHELLHUB_ENTERPRISE").Return("false") + + tests := []struct { + description string + setup func() error + }{ + { + description: "Apply up on migration 88", + setup: func() error { + _, err := c. + Database("test"). + Collection("system"). + InsertOne(ctx, map[string]interface{}{ + "authentication": map[string]interface{}{ + "local": true, + }, + }) + + return err + }, + }, + } + + for _, tc := range tests { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + + assert.NoError(tt, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[87]) + require.NoError(tt, migrates.Up(context.Background(), migrate.AllAvailable)) + require.NoError(tt, migrates.Down(context.Background(), migrate.AllAvailable)) + + query := c. + Database("test"). + Collection("system"). 
+ FindOne(context.TODO(), bson.M{}) + + system := make(map[string]interface{}) + require.NoError(tt, query.Decode(&system)) + + _, ok := system["authentication"].(map[string]interface{})["saml"] + require.Equal(tt, false, ok) + }) + } +} diff --git a/api/store/mongo/migrations/migration_89.go b/api/store/mongo/migrations/migration_89.go new file mode 100644 index 00000000000..93ec772a153 --- /dev/null +++ b/api/store/mongo/migrations/migration_89.go @@ -0,0 +1,61 @@ +package migrations + +import ( + "context" + + "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration89 = migrate.Migration{ + Version: 89, + Description: "Adding an external ID attribute to users collection", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 89, + "action": "Up", + }).Info("Applying migration") + + filter := bson.M{ + "external_id": bson.M{"$exists": false}, + } + + update := bson.M{ + "$set": bson.M{ + "external_id": "", + }, + } + + _, err := db. + Collection("users"). + UpdateMany(ctx, filter, update) + + return err + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 89, + "action": "Down", + }).Info("Reverting migration") + + filter := bson.M{ + "external_id": bson.M{"$exists": true}, + } + + update := bson.M{ + "$unset": bson.M{ + "external_id": "", + }, + } + + _, err := db. + Collection("users"). 
+ UpdateMany(ctx, filter, update) + + return err + }), +} diff --git a/api/store/mongo/migrations/migration_89_test.go b/api/store/mongo/migrations/migration_89_test.go new file mode 100644 index 00000000000..5ccc5ca0fd3 --- /dev/null +++ b/api/store/mongo/migrations/migration_89_test.go @@ -0,0 +1,117 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/shellhub-io/shellhub/pkg/envs" + envmock "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration89Up(t *testing.T) { + ctx := context.Background() + + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + test func() error + }{ + { + description: "Success to apply up on migration 89", + setup: func() error { + _, err := c. + Database("test"). + Collection("users"). + InsertOne(ctx, map[string]interface{}{ + "name": "john doe", + }) + + return err + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + + assert.NoError(tt, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[88]) + require.NoError(tt, migrates.Up(context.Background(), migrate.AllAvailable)) + + query := c. + Database("test"). + Collection("users"). 
+ FindOne(context.TODO(), bson.M{"name": "john doe"}) + + user := make(map[string]interface{}) + require.NoError(tt, query.Decode(&user)) + + _, ok := user["external_id"] + require.Equal(tt, true, ok) + }) + } +} + +func TestMigration89Down(t *testing.T) { + ctx := context.Background() + + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + test func() error + }{ + { + description: "Success to apply up on migration 89", + setup: func() error { + _, err := c. + Database("test"). + Collection("users"). + InsertOne(ctx, map[string]interface{}{ + "name": "john doe", + "external_id": "unique_string", + }) + + return err + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + assert.NoError(t, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[88]) + require.NoError(t, migrates.Up(context.Background(), migrate.AllAvailable)) + require.NoError(t, migrates.Down(context.Background(), migrate.AllAvailable)) + + query := c. + Database("test"). + Collection("users"). 
+ FindOne(context.TODO(), bson.M{"name": "john doe"}) + + user := make(map[string]interface{}) + require.NoError(t, query.Decode(&user)) + + _, ok := user["external_id"] + require.Equal(t, false, ok) + }) + } +} diff --git a/api/store/mongo/migrations/migration_8_test.go b/api/store/mongo/migrations/migration_8_test.go index 57b61bd5b4b..d35ae5a5811 100644 --- a/api/store/mongo/migrations/migration_8_test.go +++ b/api/store/mongo/migrations/migration_8_test.go @@ -4,21 +4,18 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" ) func TestMigration8(t *testing.T) { - logrus.Info("Testing Migration 8 - Test if the recorded is not unique") + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) - db := dbtest.DBServer{} - defer db.Stop() - - migrates := migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:7]...) - err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:7]...) + err := migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) session1 := models.Session{ @@ -29,13 +26,13 @@ func TestMigration8(t *testing.T) { Recorded: true, } - _, err = db.Client().Database("test").Collection("sessions").InsertOne(context.TODO(), session1) + _, err = c.Database("test").Collection("sessions").InsertOne(context.TODO(), session1) assert.NoError(t, err) - _, err = db.Client().Database("test").Collection("sessions").InsertOne(context.TODO(), session2) + _, err = c.Database("test").Collection("sessions").InsertOne(context.TODO(), session2) assert.NoError(t, err) - migrates = migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:8]...) - err = migrates.Up(migrate.AllAvailable) + migrates = migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:8]...) 
+ err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) } diff --git a/api/store/mongo/migrations/migration_9.go b/api/store/mongo/migrations/migration_9.go index 1d0823183a8..714003fff9c 100644 --- a/api/store/mongo/migrations/migration_9.go +++ b/api/store/mongo/migrations/migration_9.go @@ -13,19 +13,19 @@ import ( var migration9 = migrate.Migration{ Version: 9, - Description: "Set all devices names to lowercase in the devices colletion", - Up: func(db *mongo.Database) error { + Description: "Set all devices names to lowercase in the devices collection", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 9, "action": "Up", }).Info("Applying migration") - cursor, err := db.Collection("devices").Find(context.TODO(), bson.D{}) + cursor, err := db.Collection("devices").Find(ctx, bson.D{}) if err != nil { return err } - defer cursor.Close(context.TODO()) - for cursor.Next(context.TODO()) { + defer cursor.Close(ctx) + for cursor.Next(ctx) { device := new(models.Device) err := cursor.Decode(&device) if err != nil { @@ -33,15 +33,14 @@ var migration9 = migrate.Migration{ } device.Name = strings.ToLower(device.Name) - if _, err = db.Collection("devices").UpdateOne(context.TODO(), bson.M{"uid": device.UID}, bson.M{"$set": bson.M{"name": strings.ToLower(device.Name)}}); err != nil { + if _, err = db.Collection("devices").UpdateOne(ctx, bson.M{"uid": device.UID}, bson.M{"$set": bson.M{"name": strings.ToLower(device.Name)}}); err != nil { return err } } return nil - }, - - Down: func(db *mongo.Database) error { + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { logrus.WithFields(logrus.Fields{ "component": "migration", "version": 9, @@ -49,5 +48,5 @@ var migration9 = migrate.Migration{ }).Info("Applying migration") return nil - }, + }), } diff --git a/api/store/mongo/migrations/migration_90.go 
b/api/store/mongo/migrations/migration_90.go new file mode 100644 index 00000000000..a7227ad0a6b --- /dev/null +++ b/api/store/mongo/migrations/migration_90.go @@ -0,0 +1,65 @@ +package migrations + +import ( + "context" + + "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration90 = migrate.Migration{ + Version: 90, + Description: "Add events field on sessions", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 90, + "action": "Up", + }).Info("Applying migration") + + filter := bson.M{ + "$or": []bson.M{ + {"events": bson.M{"$exists": false}}, + {"events": bson.M{"$eq": bson.M{}}}, + }, + } + + update := bson.M{ + "$set": bson.M{ + "events": bson.M{ + "types": bson.A{}, + "items": bson.A{}, + }, + }, + } + + _, err := db. + Collection("sessions"). + UpdateMany(ctx, filter, update) + + return err + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 90, + "action": "Down", + }).Info("Reverting migration") + + filter := bson.M{} + + update := bson.M{ + "$unset": bson.M{ + "events": "", + }, + } + + _, err := db. + Collection("sessions"). 
+ UpdateMany(ctx, filter, update) + + return err + }), +} diff --git a/api/store/mongo/migrations/migration_90_test.go b/api/store/mongo/migrations/migration_90_test.go new file mode 100644 index 00000000000..65fab2cbfaf --- /dev/null +++ b/api/store/mongo/migrations/migration_90_test.go @@ -0,0 +1,120 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/shellhub-io/shellhub/pkg/envs" + envmock "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration90Up(t *testing.T) { + ctx := context.Background() + + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + }{ + { + description: "Success to apply up on migration 90", + setup: func() error { + _, err := c. + Database("test"). + Collection("sessions"). + InsertOne(ctx, map[string]interface{}{ + "uid": "test", + }) + + return err + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + + assert.NoError(tt, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[89]) + require.NoError(tt, migrates.Up(context.Background(), migrate.AllAvailable)) + + query := c. + Database("test"). + Collection("sessions"). 
+ FindOne(ctx, bson.M{ + "uid": "test", + }) + + session := make(map[string]interface{}) + require.NoError(tt, query.Decode(&session)) + + require.Contains(tt, session, "events") + require.Contains(tt, session["events"], "types") + require.Contains(tt, session["events"], "items") + }) + } +} + +func TestMigration90Down(t *testing.T) { + ctx := context.Background() + + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + }{ + { + description: "Success to revert migration 90", + setup: func() error { + _, err := c. + Database("test"). + Collection("sessions"). + InsertOne(ctx, models.Session{ + Events: models.SessionEvents{ + Types: []string{}, + Seats: []int{0}, + }, + }) + + return err + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + + assert.NoError(tt, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[89]) + require.NoError(tt, migrates.Up(context.Background(), migrate.AllAvailable)) + require.NoError(tt, migrates.Down(context.Background(), migrate.AllAvailable)) + + query := c. + Database("test"). + Collection("sessions"). 
+ FindOne(context.TODO(), bson.M{}) + + session := make(map[string]interface{}) + require.NoError(tt, query.Decode(&session)) + + require.NotContains(tt, session, "events") + }) + } +} diff --git a/api/store/mongo/migrations/migration_91.go b/api/store/mongo/migrations/migration_91.go new file mode 100644 index 00000000000..827b776bb75 --- /dev/null +++ b/api/store/mongo/migrations/migration_91.go @@ -0,0 +1,105 @@ +package migrations + +import ( + "context" + + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration91 = migrate.Migration{ + Version: 91, + Description: "Add sessions_events collections", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 91, + "action": "Up", + }).Info("Applying migration") + + sessionIndex := mongo.IndexModel{ + Keys: bson.M{ + "session": 1, + }, + } + + if _, err := db.Collection("sessions_events").Indexes().CreateOne(ctx, sessionIndex); err != nil { + return err + } + + cursor, err := db.Collection("sessions").Find(ctx, bson.M{"events.items": bson.M{"$exists": true}}) + if err != nil { + return err + } + + defer cursor.Close(ctx) + + for cursor.Next(ctx) { + var session struct { + UID string `bson:"uid"` + Events struct { + Items []models.SessionEvent `bson:"items"` + } `bson:"events"` + } + + if err := cursor.Decode(&session); err != nil { + return err + } + + for _, event := range session.Events.Items { + event.Session = session.UID + if _, err := db.Collection("sessions_events").InsertOne(ctx, event); err != nil { + return err + } + } + + if _, err := db.Collection("sessions").UpdateOne(ctx, bson.M{ + "uid": session.UID, + }, bson.M{ + "$unset": bson.M{ + "events.items": "", + }, + }); err != nil { + return err + } + } + + return nil + }), + Down: 
migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 91, + "action": "Down", + }).Info("Reverting migration") + cursor, err := db.Collection("sessions_events").Find(ctx, bson.M{}) + if err != nil { + return err + } + + defer cursor.Close(ctx) + + for cursor.Next(ctx) { + var event models.SessionEvent + if err := cursor.Decode(&event); err != nil { + return err + } + sessionID := event.Session + + event.Session = "" + update := bson.M{"$push": bson.M{"events.items": event}} + if _, err := db.Collection("sessions").UpdateOne(ctx, bson.M{"uid": sessionID}, update); err != nil { + return err + } + } + + if err := db.Collection("sessions_events").Drop(ctx); err != nil { + return err + } + + return nil + }), +} diff --git a/api/store/mongo/migrations/migration_91_test.go b/api/store/mongo/migrations/migration_91_test.go new file mode 100644 index 00000000000..cc5d47ed021 --- /dev/null +++ b/api/store/mongo/migrations/migration_91_test.go @@ -0,0 +1,273 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/shellhub-io/shellhub/pkg/envs" + envmock "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration91Up(t *testing.T) { + ctx := context.Background() + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "Session with single event", + setup: func() error { + _, err := c. + Database("test"). + Collection("sessions"). + InsertOne(ctx, bson.M{ + "uid": "test", + "events": bson.M{ + "types": bson.A{"test"}, + "items": []bson.M{ + {"type": "test", "data": "some data"}, + }, + }, + }) + + return err + }, + verify: func(tt *testing.T) { + query := c. 
+ Database("test"). + Collection("sessions_events"). + FindOne(ctx, bson.M{"session": "test"}) + sessionEvent := make(map[string]interface{}) + require.NoError(tt, query.Decode(&sessionEvent)) + require.Contains(tt, sessionEvent, "type") + require.Contains(tt, sessionEvent, "data") + + query = c. + Database("test"). + Collection("sessions"). + FindOne(ctx, bson.M{"uid": "test"}) + session := make(map[string]interface{}) + require.NoError(tt, query.Decode(&session)) + require.NotContains(tt, session, "events.items") + }, + }, + { + description: "Session with empty events.items array", + setup: func() error { + _, err := c. + Database("test"). + Collection("sessions"). + InsertOne(ctx, bson.M{ + "uid": "empty-items", + "events": bson.M{ + "types": bson.A{"test"}, + "items": []bson.M{}, + }, + }) + + return err + }, + verify: func(tt *testing.T) { + count, err := c. + Database("test"). + Collection("sessions_events"). + CountDocuments(ctx, bson.M{"session": "empty-items"}) + require.NoError(tt, err) + assert.Equal(tt, int64(0), count) + + query := c. + Database("test"). + Collection("sessions"). + FindOne(ctx, bson.M{"uid": "empty-items"}) + session := make(map[string]interface{}) + require.NoError(tt, query.Decode(&session)) + require.NotContains(tt, session, "events.items") + }, + }, + { + description: "Session with multiple events", + setup: func() error { + _, err := c. + Database("test"). + Collection("sessions"). + InsertOne(ctx, bson.M{ + "uid": "multi-event", + "events": bson.M{ + "types": bson.A{"test"}, + "items": []bson.M{ + {"type": "event1", "data": "data1"}, + {"type": "event2", "data": "data2"}, + }, + }, + }) + + return err + }, + verify: func(tt *testing.T) { + cursor, err := c. + Database("test"). + Collection("sessions_events"). 
+ Find(ctx, bson.M{"session": "multi-event"}) + require.NoError(tt, err) + var events []bson.M + require.NoError(tt, cursor.All(ctx, &events)) + assert.Equal(tt, 2, len(events)) + for _, event := range events { + assert.Equal(tt, "multi-event", event["session"]) + } + + query := c. + Database("test"). + Collection("sessions"). + FindOne(ctx, bson.M{"uid": "multi-event"}) + session := make(map[string]interface{}) + require.NoError(tt, query.Decode(&session)) + require.NotContains(tt, session, "events.items") + }, + }, + { + description: "Session with no events field", + setup: func() error { + _, err := c. + Database("test"). + Collection("sessions"). + InsertOne(ctx, bson.M{ + "uid": "no-events", + }) + + return err + }, + verify: func(tt *testing.T) { + count, err := c. + Database("test"). + Collection("sessions_events"). + CountDocuments(ctx, bson.M{"session": "no-events"}) + require.NoError(tt, err) + assert.Equal(tt, int64(0), count) + + query := c. + Database("test"). + Collection("sessions"). + FindOne(ctx, bson.M{"uid": "no-events"}) + session := make(map[string]interface{}) + require.NoError(tt, query.Decode(&session)) + _, exists := session["events"] + assert.False(tt, exists) + }, + }, + { + description: "Verify indexes created", + setup: func() error { + _, err := c. + Database("test"). + Collection("sessions"). + InsertOne(ctx, bson.M{ + "uid": "index-test", + "events": bson.M{ + "types": bson.A{"test"}, + "items": []bson.M{ + {"type": "test", "data": "index data"}, + }, + }, + }) + + return err + }, + verify: func(tt *testing.T) { + indexCursor, err := c. + Database("test"). + Collection("sessions_events"). 
+ Indexes().List(ctx) + require.NoError(tt, err) + var indexes []bson.M + require.NoError(tt, indexCursor.All(ctx, &indexes)) + var sessionIndexFound bool + for _, index := range indexes { + if key, ok := index["key"].(bson.M); ok { + if _, ok := key["session"]; ok { + sessionIndexFound = true + } + } + } + assert.True(tt, sessionIndexFound) + }, + }, + { + description: "Multiple sessions processed", + setup: func() error { + docs := []interface{}{ + bson.M{ + "uid": "session1", + "events": bson.M{ + "types": bson.A{"test"}, + "items": []bson.M{ + {"type": "event1", "data": "data1"}, + }, + }, + }, + bson.M{ + "uid": "session2", + "events": bson.M{ + "types": bson.A{"test"}, + "items": []bson.M{ + {"type": "event2", "data": "data2"}, + {"type": "event3", "data": "data3"}, + }, + }, + }, + } + _, err := c. + Database("test"). + Collection("sessions"). + InsertMany(ctx, docs) + + return err + }, + verify: func(tt *testing.T) { + count1, err := c. + Database("test"). + Collection("sessions_events"). + CountDocuments(ctx, bson.M{"session": "session1"}) + require.NoError(tt, err) + assert.Equal(tt, int64(1), count1) + + count2, err := c. + Database("test"). + Collection("sessions_events"). + CountDocuments(ctx, bson.M{"session": "session2"}) + require.NoError(tt, err) + assert.Equal(tt, int64(2), count2) + + for _, uid := range []string{"session1", "session2"} { + query := c. + Database("test"). + Collection("sessions"). 
+ FindOne(ctx, bson.M{"uid": uid}) + session := make(map[string]interface{}) + require.NoError(tt, query.Decode(&session)) + require.NotContains(tt, session, "events.items") + } + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + require.NoError(tt, tc.setup()) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[90]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + tc.verify(tt) + }) + } +} diff --git a/api/store/mongo/migrations/migration_92.go b/api/store/mongo/migrations/migration_92.go new file mode 100644 index 00000000000..c4bd4f91ed9 --- /dev/null +++ b/api/store/mongo/migrations/migration_92.go @@ -0,0 +1,89 @@ +package migrations + +import ( + "context" + + "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration92 = migrate.Migration{ + Version: 92, + Description: "Adding seat and seats to sessions and sessions events", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 92, + "action": "Up", + }).Info("Applying migration") + + if _, err := db. + Collection("sessions"). + UpdateMany(ctx, bson.M{ + "authenticated": true, + }, bson.M{ + "$set": bson.M{ + "events.seats": bson.A{0}, + }, + }); err != nil { + return err + } + + if _, err := db. + Collection("sessions"). + UpdateMany(ctx, bson.M{ + "authenticated": false, + }, bson.M{ + "$set": bson.M{ + "events.seats": bson.A{}, + }, + }); err != nil { + return err + } + + if _, err := db. + Collection("sessions_events"). 
+ UpdateMany(ctx, bson.M{}, bson.M{ + "$set": bson.M{ + "seat": 0, + }, + }); err != nil { + return err + } + + return nil + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 92, + "action": "Down", + }).Info("Reverting migration") + + filter := bson.M{} + + if _, err := db. + Collection("sessions"). + UpdateMany(ctx, filter, bson.M{ + "$unset": bson.M{ + "events.seats": "", + }, + }); err != nil { + return err + } + + if _, err := db. + Collection("sessions_events"). + UpdateMany(ctx, filter, bson.M{ + "$unset": bson.M{ + "seat": "", + }, + }); err != nil { + return err + } + + return nil + }), +} diff --git a/api/store/mongo/migrations/migration_92_test.go b/api/store/mongo/migrations/migration_92_test.go new file mode 100644 index 00000000000..3ecd5f403b0 --- /dev/null +++ b/api/store/mongo/migrations/migration_92_test.go @@ -0,0 +1,276 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/shellhub-io/shellhub/pkg/envs" + envmock "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration92Up(t *testing.T) { + ctx := context.Background() + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "Authenticated session should get events.seats set to [int32(0)]", + setup: func() error { + _, err := c. + Database("test"). + Collection("sessions"). + InsertOne(ctx, bson.M{ + "uid": "auth-test", + "authenticated": true, + "events": bson.M{ + "types": bson.A{"test"}, + }, + }) + if err != nil { + return err + } + _, err = c. + Database("test"). + Collection("sessions_events"). 
+ InsertOne(ctx, bson.M{ + "session": "auth-test", + "type": "test", + "data": "some data", + }) + + return err + }, + verify: func(tt *testing.T) { + query := c. + Database("test"). + Collection("sessions"). + FindOne(ctx, bson.M{"uid": "auth-test"}) + session := make(map[string]interface{}) + require.NoError(tt, query.Decode(&session)) + events, ok := session["events"].(map[string]interface{}) + require.True(tt, ok) + seats, exists := events["seats"] + require.True(tt, exists) + assert.Equal(tt, bson.A{int32(0)}, seats) + + query = c. + Database("test"). + Collection("sessions_events"). + FindOne(ctx, bson.M{"session": "auth-test"}) + sessionEvent := make(map[string]interface{}) + require.NoError(tt, query.Decode(&sessionEvent)) + seat, exists := sessionEvent["seat"] + require.True(tt, exists) + assert.Equal(tt, int32(0), seat) + }, + }, + { + description: "Unauthenticated session should get events.seats set to []", + setup: func() error { + _, err := c. + Database("test"). + Collection("sessions"). + InsertOne(ctx, bson.M{ + "uid": "unauth-test", + "authenticated": false, + "events": bson.M{ + "types": bson.A{"test"}, + }, + }) + if err != nil { + return err + } + _, err = c. + Database("test"). + Collection("sessions_events"). + InsertOne(ctx, bson.M{ + "session": "unauth-test", + "type": "test", + "data": "some data", + }) + + return err + }, + verify: func(tt *testing.T) { + query := c. + Database("test"). + Collection("sessions"). + FindOne(ctx, bson.M{"uid": "unauth-test"}) + session := make(map[string]interface{}) + require.NoError(tt, query.Decode(&session)) + events, ok := session["events"].(map[string]interface{}) + require.True(tt, ok) + seats, exists := events["seats"] + require.True(tt, exists) + assert.Equal(tt, bson.A{}, seats) + + query = c. + Database("test"). + Collection("sessions_events"). 
+ FindOne(ctx, bson.M{"session": "unauth-test"}) + sessionEvent := make(map[string]interface{}) + require.NoError(tt, query.Decode(&sessionEvent)) + seat, exists := sessionEvent["seat"] + require.True(tt, exists) + assert.Equal(tt, int32(0), seat) + }, + }, + { + description: "Session without events field should be updated if authenticated", + setup: func() error { + _, err := c. + Database("test"). + Collection("sessions"). + InsertOne(ctx, bson.M{ + "uid": "no-events-test", + "authenticated": true, + }) + if err != nil { + return err + } + _, err = c. + Database("test"). + Collection("sessions_events"). + InsertOne(ctx, bson.M{ + "session": "no-events-test", + "type": "test", + "data": "some data", + }) + + return err + }, + verify: func(tt *testing.T) { + query := c. + Database("test"). + Collection("sessions"). + FindOne(ctx, bson.M{"uid": "no-events-test"}) + session := make(map[string]interface{}) + require.NoError(tt, query.Decode(&session)) + events, ok := session["events"].(map[string]interface{}) + require.True(tt, ok) + seats, exists := events["seats"] + require.True(tt, exists) + assert.Equal(tt, bson.A{int32(0)}, seats) + + query = c. + Database("test"). + Collection("sessions_events"). + FindOne(ctx, bson.M{"session": "no-events-test"}) + sessionEvent := make(map[string]interface{}) + require.NoError(tt, query.Decode(&sessionEvent)) + seat, exists := sessionEvent["seat"] + require.True(tt, exists) + assert.Equal(tt, int32(0), seat) + }, + }, + { + description: "Multiple sessions documents update correctly", + setup: func() error { + _, err := c. + Database("test"). + Collection("sessions"). + InsertMany(ctx, []interface{}{ + bson.M{ + "uid": "multi-test-auth", + "authenticated": true, + "events": bson.M{"types": bson.A{"test"}}, + }, + bson.M{ + "uid": "multi-test-unauth", + "authenticated": false, + "events": bson.M{"types": bson.A{"test"}}, + }, + }) + if err != nil { + return err + } + _, err = c. + Database("test"). 
+ Collection("sessions_events"). + InsertMany(ctx, []interface{}{ + bson.M{ + "session": "multi-test-auth", + "type": "test", + "data": "data1", + }, + bson.M{ + "session": "multi-test-unauth", + "type": "test", + "data": "data2", + }, + }) + + return err + }, + verify: func(tt *testing.T) { + query := c. + Database("test"). + Collection("sessions"). + FindOne(ctx, bson.M{"uid": "multi-test-auth"}) + sessionAuth := make(map[string]interface{}) + require.NoError(tt, query.Decode(&sessionAuth)) + eventsAuth, ok := sessionAuth["events"].(map[string]interface{}) + require.True(tt, ok) + seatsAuth, exists := eventsAuth["seats"] + require.True(tt, exists) + assert.Equal(tt, bson.A{int32(0)}, seatsAuth) + + query = c. + Database("test"). + Collection("sessions"). + FindOne(ctx, bson.M{"uid": "multi-test-unauth"}) + sessionUnauth := make(map[string]interface{}) + require.NoError(tt, query.Decode(&sessionUnauth)) + eventsUnauth, ok := sessionUnauth["events"].(map[string]interface{}) + require.True(tt, ok) + seatsUnauth, exists := eventsUnauth["seats"] + require.True(tt, exists) + assert.Equal(tt, bson.A{}, seatsUnauth) + + query = c. + Database("test"). + Collection("sessions_events"). + FindOne(ctx, bson.M{"session": "multi-test-auth"}) + sessionEventAuth := make(map[string]interface{}) + require.NoError(tt, query.Decode(&sessionEventAuth)) + seatAuth, exists := sessionEventAuth["seat"] + require.True(tt, exists) + assert.Equal(tt, int32(0), seatAuth) + + query = c. + Database("test"). + Collection("sessions_events"). 
+ FindOne(ctx, bson.M{"session": "multi-test-unauth"}) + sessionEventUnauth := make(map[string]interface{}) + require.NoError(tt, query.Decode(&sessionEventUnauth)) + seatUnauth, exists := sessionEventUnauth["seat"] + require.True(tt, exists) + assert.Equal(tt, int32(0), seatUnauth) + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + + require.NoError(tt, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[91]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + + tc.verify(tt) + }) + } +} diff --git a/api/store/mongo/migrations/migration_93.go b/api/store/mongo/migrations/migration_93.go new file mode 100644 index 00000000000..809cbffa153 --- /dev/null +++ b/api/store/mongo/migrations/migration_93.go @@ -0,0 +1,35 @@ +package migrations + +import ( + "context" + + "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration93 = migrate.Migration{ + Version: 93, + Description: "remove public_url and public_url_address from device collection", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 93, + "action": "Up", + }).Info("Applying migration") + + _, err := db.Collection("devices").UpdateMany(ctx, bson.M{}, bson.M{"$unset": bson.M{"public_url": "", "public_url_address": ""}}) + + return err + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 93, + "action": "Down", + }).Info("Cannot undo migration") + + return nil + }), +} diff --git a/api/store/mongo/migrations/migration_93_test.go b/api/store/mongo/migrations/migration_93_test.go new file mode 100644 index 00000000000..58b6e4257ac --- 
/dev/null +++ b/api/store/mongo/migrations/migration_93_test.go @@ -0,0 +1,71 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/shellhub-io/shellhub/pkg/envs" + envmock "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration93Up(t *testing.T) { + ctx := context.Background() + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "removes the public_url attribute from devices", + setup: func() error { + _, err := c.Database("test").Collection("devices").InsertOne(ctx, bson.M{"uid": "uid", "public_url": true}) + + return err + }, + verify: func(tt *testing.T) { + device := make(map[string]any) + require.NoError(tt, c.Database("test").Collection("devices").FindOne(ctx, bson.M{"uid": "uid"}).Decode(&device)) + + _, ok := device["public_url"] + require.Equal(tt, false, ok) + }, + }, + { + description: "removes the public_url_address attribute from devices", + setup: func() error { + _, err := c.Database("test").Collection("devices").InsertOne(ctx, bson.M{"uid": "uid", "public_url_address": "address"}) + + return err + }, + verify: func(tt *testing.T) { + device := make(map[string]any) + require.NoError(tt, c.Database("test").Collection("devices").FindOne(ctx, bson.M{"uid": "uid"}).Decode(&device)) + + _, ok := device["public_url_address"] + require.Equal(tt, false, ok) + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + + require.NoError(tt, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[92]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + + tc.verify(tt) + }) + } +} diff --git 
a/api/store/mongo/migrations/migration_94.go b/api/store/mongo/migrations/migration_94.go new file mode 100644 index 00000000000..035d0629cbd --- /dev/null +++ b/api/store/mongo/migrations/migration_94.go @@ -0,0 +1,89 @@ +package migrations + +import ( + "context" + + "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration94 = migrate.Migration{ + Version: 94, + Description: "Adding 'disconnected_at' attribute to 'devices'", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 94, + "action": "Up", + }).Info("Applying migration") + + session, err := db.Client().StartSession() + if err != nil { + return err + } + defer session.EndSession(ctx) + + fn := func(_ mongo.SessionContext) (any, error) { + pipeline := []bson.M{ + { + "$match": bson.M{ + "uid": bson.M{ + "$exists": true, + }, + }, + }, + { + "$lookup": bson.M{ + "from": "connected_devices", + "localField": "uid", + "foreignField": "uid", + "as": "connected_devices_data", + }, + }, + } + + cursor, err := db.Collection("devices").Aggregate(ctx, pipeline) + if err != nil { + return nil, err + } + defer cursor.Close(ctx) + + for cursor.Next(ctx) { + device := make(map[string]any) + if err := cursor.Decode(&device); err != nil { + return nil, err + } + + update := bson.M{"$set": bson.M{"disconnected_at": device["last_seen"]}} + if connectedDevicesData, ok := device["connected_devices_data"].(bson.A); ok && len(connectedDevicesData) > 0 { + update = bson.M{"$set": bson.M{"disconnected_at": nil}} + } + + if _, err := db.Collection("devices").UpdateOne(ctx, bson.M{"_id": device["_id"]}, update); err != nil { + return nil, err + } + } + + if err := db.Collection("connected_devices").Drop(ctx); err != nil { + return nil, err + } + + return nil, nil + } + + _, err = session.WithTransaction(ctx, fn) + + 
return err + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 94, + "action": "Down", + }).Info("Cannot down migration") + + return nil + }), +} diff --git a/api/store/mongo/migrations/migration_94_test.go b/api/store/mongo/migrations/migration_94_test.go new file mode 100644 index 00000000000..b81965e0439 --- /dev/null +++ b/api/store/mongo/migrations/migration_94_test.go @@ -0,0 +1,112 @@ +package migrations + +import ( + "context" + "slices" + "testing" + "time" + + "github.com/shellhub-io/shellhub/pkg/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +func TestMigration94Up(t *testing.T) { + cases := []struct { + description string + setup func(ctx context.Context) error + verify func(ctx context.Context, tt *testing.T) + }{ + { + description: "drops the 'connected_devices' collection", + setup: func(ctx context.Context) error { + _, err := c.Database("test").Collection("connected_devices").InsertOne(ctx, bson.M{"uid": "auth-test"}) + + return err + }, + verify: func(ctx context.Context, tt *testing.T) { + res, err := c.Database("test").ListCollectionNames(ctx, bson.M{}) + require.NoError(tt, err) + require.Equal(tt, false, slices.Contains(res, "connected_devices")) + }, + }, + { + description: "sets the value to nil when the device have a related connected_device", + setup: func(ctx context.Context) error { + randomUIDs := []string{uuid.Generate(), uuid.Generate()} + for _, uid := range randomUIDs { + if _, err := c.Database("test").Collection("devices").InsertOne(ctx, bson.M{"uid": uid, "last_seen": time.Now()}); err != nil { + return err + } + + if _, err := c.Database("test").Collection("connected_devices").InsertOne(ctx, bson.M{"uid": uid}); err != nil { + return err + } + } + + 
return nil + }, + verify: func(ctx context.Context, tt *testing.T) { + cursor, err := c.Database("test").Collection("devices").Find(ctx, bson.M{}) + require.NoError(tt, err) + defer cursor.Close(ctx) + + for cursor.Next(ctx) { + device := make(map[string]any) + require.NoError(tt, cursor.Decode(&device)) + + disconnectedAt, ok := device["disconnected_at"] + require.Equal(tt, true, ok) + require.Equal(tt, nil, disconnectedAt) + } + }, + }, + { + description: "sets the value to last_seen when the device does not have a related connected_device", + setup: func(ctx context.Context) error { + randomUIDs := []string{uuid.Generate(), uuid.Generate()} + for _, uid := range randomUIDs { + if _, err := c.Database("test").Collection("devices").InsertOne(ctx, bson.M{"uid": uid, "last_seen": time.Now()}); err != nil { + return err + } + } + + return nil + }, + verify: func(ctx context.Context, tt *testing.T) { + cursor, err := c.Database("test").Collection("devices").Find(ctx, bson.M{}) + require.NoError(tt, err) + defer cursor.Close(ctx) + + for cursor.Next(ctx) { + device := make(map[string]any) + require.NoError(tt, cursor.Decode(&device)) + + disconnectedAt, ok := device["disconnected_at"] + require.Equal(tt, true, ok) + require.WithinDuration(tt, device["last_seen"].(primitive.DateTime).Time(), disconnectedAt.(primitive.DateTime).Time(), 1*time.Second) + } + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + ctx := context.Background() + + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + + require.NoError(tt, tc.setup(ctx)) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[93]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + + tc.verify(ctx, tt) + }) + } +} diff --git a/api/store/mongo/migrations/migration_95.go b/api/store/mongo/migrations/migration_95.go new file mode 100644 index 00000000000..5646bebab5a --- /dev/null +++ b/api/store/mongo/migrations/migration_95.go @@ -0,0 +1,379 @@ +package 
migrations + +import ( + "context" + "fmt" + "slices" + + "github.com/shellhub-io/shellhub/pkg/envs" + "github.com/shellhub-io/shellhub/pkg/models" + log "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +var migration95 = migrate.Migration{ + Version: 95, + Description: "Convert recorded sessions into session's events", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{ + "component": "migration", + "version": 95, + "action": "Up", + }).Info("Applying migration") + + if !envs.IsEnterprise() { + log.Info("skipping migration as the ShellHub instance isn't enterprise") + + return nil + } + + /*sessionUIDsCursor, err := db.Collection("recorded_sessions").Aggregate(ctx, []bson.M{ + {"$group": bson.M{"_id": "$uid"}}, + }) + if err != nil { + return fmt.Errorf("failed to query session UIDs: %w", err) + } + defer sessionUIDsCursor.Close(ctx) + + var sessionUIDs []string + for sessionUIDsCursor.Next(ctx) { + var result struct { + UID string `bson:"_id"` + } + if err := sessionUIDsCursor.Decode(&result); err != nil { + log.WithError(err).Error("Failed to decode UID result") + + return err + } + sessionUIDs = append(sessionUIDs, result.UID) + }*/ + + sessionUIDs, err := db.Collection("recorded_sessions").Distinct(ctx, "uid", bson.M{}) + if err != nil { + log.WithError(err).Error("failed to get all recorded_sessions uids") + + return fmt.Errorf("failed to query session UIDs: %w", err) + } + + if len(sessionUIDs) == 0 { + log.Info("No recorded sessions found") + + return nil + } + + sessionsCursor, err := db.Collection("sessions").Find(ctx, bson.M{ + "uid": bson.M{"$in": sessionUIDs}, + }) + if err != nil { + return fmt.Errorf("failed to query sessions: %w", err) + } + + defer sessionsCursor.Close(ctx) + + existingSessions 
:= make(map[string]*models.Session) + for sessionsCursor.Next(ctx) { + var session models.Session + if err := sessionsCursor.Decode(&session); err != nil { + log.WithError(err).Error("Failed to decode session") + + return err + } + existingSessions[session.UID] = &session + } + + for _, uid := range sessionUIDs { + session := existingSessions[uid.(string)] + + logger := log.WithField("uid", uid) + + logger.Debug("Processing session") + if session == nil { + if _, err := db.Collection("recorded_sessions").DeleteMany(ctx, bson.M{"uid": uid}); err != nil { + logger.WithError(err).Error("failed to delete the recorded session when session isn't found") + + return err + } + + logger.Debug("Deleted recorded session for a not found session") + + continue + } + + recordsCursor, err := db.Collection("recorded_sessions").Find(ctx, bson.M{"uid": uid}, options.Find().SetSort(bson.D{{Key: "time", Value: 1}})) + if err != nil { + logger.WithError(err).Error("Failed to query session records") + + return err + } + + defer recordsCursor.Close(ctx) + + var records []models.RecordedSession + if err := recordsCursor.All(ctx, &records); err != nil { + logger.WithError(err).Error("Failed to decode all records") + + return err + } + + if len(records) == 0 { + logger.Debug("No records found for session") + + continue + } + + var sessionEvents []interface{} + var sessionUpdates []mongo.WriteModel + eventTypesToAdd := make(map[string]bool) + firstRecord := records[0] + + lastWidth, lastHeight := firstRecord.Width, firstRecord.Height + + if !slices.Contains(session.Events.Types, string(models.SessionEventTypePtyRequest)) { + eventTypesToAdd[string(models.SessionEventTypePtyRequest)] = true + sessionEvents = append( + sessionEvents, + &models.SessionEvent{ + Session: uid.(string), + Type: models.SessionEventTypePtyRequest, + Timestamp: firstRecord.Time, + Data: &models.SSHPty{ + Term: "", + Columns: uint32(firstRecord.Width), + Rows: uint32(firstRecord.Height), + Width: 0, + Height: 0, + 
Modelist: []byte{}, + }, + Seat: 0, + }, + ) + } + + if !slices.Contains(session.Events.Types, string(models.SessionEventTypePtyOutput)) { + eventTypesToAdd[string(models.SessionEventTypePtyOutput)] = true + } + + for _, record := range records { + if record.Width != lastWidth || record.Height != lastHeight { + if !slices.Contains(session.Events.Types, string(models.SessionEventTypeWindowChange)) { + eventTypesToAdd[string(models.SessionEventTypeWindowChange)] = true + } + sessionEvents = append( + sessionEvents, + &models.SessionEvent{ + Session: uid.(string), + Type: models.SessionEventTypeWindowChange, + Timestamp: record.Time, + Data: &models.SSHWindowChange{ + Columns: uint32(record.Width), + Rows: uint32(record.Height), + Width: 0, + Height: 0, + }, + Seat: 0, + }, + ) + lastWidth, lastHeight = record.Width, record.Height + } + sessionEvents = append( + sessionEvents, + &models.SessionEvent{ + Session: uid.(string), + Type: models.SessionEventTypePtyOutput, + Timestamp: record.Time, + Data: &models.SSHPtyOutput{Output: record.Message}, + Seat: 0, + }, + ) + } + + if len(sessionEvents) > 0 { + if _, err := db.Collection("sessions_events").InsertMany(ctx, sessionEvents); err != nil { + logger.WithError(err).Error("Failed to bulk insert session events") + + return err + } + } + + if len(eventTypesToAdd) > 0 { + var typesToAdd []string + for eventType := range eventTypesToAdd { + typesToAdd = append(typesToAdd, eventType) + } + updateDoc := bson.M{"$addToSet": bson.M{ + "events.types": bson.M{"$each": typesToAdd}, + "events.seats": 0, + }} + sessionUpdates = append(sessionUpdates, mongo.NewUpdateOneModel().SetFilter(bson.M{"uid": uid}).SetUpdate(updateDoc)) + } + + if len(sessionUpdates) > 0 { + if _, err := db.Collection("sessions").BulkWrite(ctx, sessionUpdates); err != nil { + logger.WithError(err).Error("Failed to bulk update session") + + return err + } + } + + if _, err := db.Collection("recorded_sessions").DeleteMany(ctx, bson.M{"uid": uid}); err != nil { 
+ logger.WithError(err).Error("failed to delete the recorded session") + + return err + } + + logger.Debug("Successfully processed session") + } + + return nil + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{ + "component": "migration", + "version": 95, + "action": "Down", + }).Info("Reverting migration") + + if !envs.IsEnterprise() { + return nil + } + + cursor, err := db.Collection("sessions").Find(ctx, bson.M{ + "events.types": bson.M{ + "$in": []models.SessionEventType{ + models.SessionEventTypePtyRequest, + models.SessionEventTypePtyOutput, + models.SessionEventTypeWindowChange, + }, + }, + }) + if err != nil { + return fmt.Errorf("failed to query sessions: %w", err) + } + + defer cursor.Close(ctx) + + for cursor.Next(ctx) { + var session struct { + UID string `bson:"uid"` + } + + if err := cursor.Decode(&session); err != nil { + log.WithError(err).Error("Failed to decode session") + + return err + } + + uid := session.UID + log.WithField("uid", uid).Debug("Reverting session") + + eventsCursor, err := db.Collection("sessions_events").Find(ctx, bson.M{ + "session": uid, + "type": bson.M{ + "$in": []models.SessionEventType{ + models.SessionEventTypePtyRequest, + models.SessionEventTypePtyOutput, + models.SessionEventTypeWindowChange, + }, + }, + }, options.Find().SetSort(bson.D{{Key: "timestamp", Value: 1}})) + if err != nil { + log.WithError(err).WithField("uid", uid).Error("Failed to query session events") + + return err + } + + defer eventsCursor.Close(ctx) + + var recordedSessions []interface{} + var lastWidth, lastHeight uint32 + + for eventsCursor.Next(ctx) { + var event models.SessionEvent + if err := eventsCursor.Decode(&event); err != nil { + log.WithError(err).WithField("uid", uid).Error("Failed to decode event") + + continue + } + + switch event.Type { + case models.SessionEventTypePtyRequest: + d := &models.SSHPty{} + data, _ := bson.Marshal(event.Data.(primitive.D)) + if err := 
bson.Unmarshal(data, &d); err != nil { + return err + } + lastWidth, lastHeight = d.Columns, d.Rows + + case models.SessionEventTypeWindowChange: + d := &models.SSHWindowChange{} + data, _ := bson.Marshal(event.Data.(primitive.D)) + if err := bson.Unmarshal(data, &d); err != nil { + return err + } + lastWidth, lastHeight = d.Columns, d.Rows + + case models.SessionEventTypePtyOutput: + d := &models.SSHPtyOutput{} + data, _ := bson.Marshal(event.Data.(primitive.D)) + if err := bson.Unmarshal(data, &d); err != nil { + return err + } + + recordedSessions = append(recordedSessions, bson.M{ + "uid": uid, + "message": d.Output, + "time": event.Timestamp, + "width": lastWidth, + "height": lastHeight, + }) + } + } + + if len(recordedSessions) > 0 { + if _, err := db.Collection("recorded_sessions").InsertMany(ctx, recordedSessions); err != nil { + log.WithError(err).WithField("uid", uid).Error("Failed to bulk insert recorded sessions") + } + } + + _, err = db.Collection("sessions_events").DeleteMany(ctx, bson.M{ + "session": uid, + "type": bson.M{ + "$in": []models.SessionEventType{ + models.SessionEventTypePtyRequest, + models.SessionEventTypePtyOutput, + models.SessionEventTypeWindowChange, + }, + }, + }) + if err != nil { + log.WithError(err).WithField("uid", uid).Error("Failed to delete session events") + } + + _, err = db.Collection("sessions").UpdateOne(ctx, + bson.M{"uid": uid}, + bson.M{ + "$pull": bson.M{ + "events.types": bson.M{ + "$in": []models.SessionEventType{ + models.SessionEventTypePtyRequest, + models.SessionEventTypePtyOutput, + models.SessionEventTypeWindowChange, + }, + }, + }, + }, + ) + if err != nil { + log.WithError(err).WithField("uid", uid).Error("Failed to update session") + } + + log.WithField("uid", uid).Debug("Successfully reverted session") + } + + return nil + }), +} diff --git a/api/store/mongo/migrations/migration_95_test.go b/api/store/mongo/migrations/migration_95_test.go new file mode 100644 index 00000000000..b4663d3ea91 --- /dev/null 
+++ b/api/store/mongo/migrations/migration_95_test.go @@ -0,0 +1,435 @@ +package migrations + +import ( + "context" + "fmt" + "testing" + + "github.com/shellhub-io/shellhub/pkg/envs" + envmock "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration95Up(t *testing.T) { + ctx := context.Background() + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + mock.On("Get", "SHELLHUB_ENTERPRISE").Return("true").Once() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "Single recorded session should be converted to events", + setup: func() error { + _, err := c. + Database("test"). + Collection("sessions"). + InsertOne(ctx, bson.M{ + "uid": "session-1", + "authenticated": true, + "events": bson.M{ + "types": bson.A{}, + "seats": bson.A{}, + }, + }) + if err != nil { + return err + } + + _, err = c. + Database("test"). + Collection("recorded_sessions"). + InsertMany(ctx, []any{ + bson.M{ + "uid": "session-1", + "message": "initial output", + "time": "2023-01-01T10:00:00Z", + "width": 80, + "height": 24, + }, + bson.M{ + "uid": "session-1", + "message": "resized terminal output", + "time": "2023-01-01T10:01:00Z", + "width": 100, + "height": 30, + }, + bson.M{ + "uid": "session-1", + "message": "final output", + "time": "2023-01-01T10:02:00Z", + "width": 100, + "height": 30, + }, + }) + + return err + }, + verify: func(tt *testing.T) { + query := c. + Database("test"). + Collection("sessions"). 
+ FindOne(ctx, bson.M{"uid": "session-1"}) + + session := make(map[string]any) + require.NoError(tt, query.Decode(&session)) + + events, ok := session["events"].(map[string]any) + require.True(tt, ok) + + types, ok := events["types"].(bson.A) + require.True(tt, ok) + assert.Contains(tt, types, "pty-req") + assert.Contains(tt, types, "window-change") + assert.Contains(tt, types, "pty-output") + + seats, ok := events["seats"].(bson.A) + require.True(tt, ok) + assert.Contains(tt, seats, int32(0)) + + cursor, err := c. + Database("test"). + Collection("sessions_events"). + Find(ctx, bson.M{"session": "session-1"}) + require.NoError(tt, err) + defer cursor.Close(ctx) + + var sessionEvents []map[string]any + require.NoError(tt, cursor.All(ctx, &sessionEvents)) + + assert.Equal(tt, 5, len(sessionEvents)) + + eventTypeCounts := make(map[string]int) + for _, event := range sessionEvents { + eventType := event["type"].(string) + eventTypeCounts[eventType]++ + assert.Equal(tt, int32(0), event["seat"]) + } + + assert.Equal(tt, 1, eventTypeCounts["pty-req"]) + assert.Equal(tt, 1, eventTypeCounts["window-change"]) + assert.Equal(tt, 3, eventTypeCounts["pty-output"]) + + count, err := c. + Database("test"). + Collection("recorded_sessions"). + CountDocuments(ctx, bson.M{"uid": "session-1"}) + require.NoError(tt, err) + assert.Equal(tt, int64(0), count) + }, + }, + { + description: "When not in enterprise mode, nothing should be migrated", + setup: func() error { + mock.On("Get", "SHELLHUB_ENTERPRISE").Return("false").Once() + + _, err := c. + Database("test"). + Collection("sessions"). + InsertOne(ctx, bson.M{ + "uid": "session-4", + "authenticated": true, + "events": bson.M{ + "types": bson.A{}, + "seats": bson.A{}, + }, + }) + if err != nil { + return err + } + + _, err = c. + Database("test"). + Collection("recorded_sessions"). 
+ InsertOne(ctx, bson.M{ + "uid": "session-4", + "message": "test output", + "time": "2023-01-01T10:00:00Z", + "width": 80, + "height": 24, + }) + + return err + }, + verify: func(tt *testing.T) { + count, err := c. + Database("test"). + Collection("recorded_sessions"). + CountDocuments(ctx, bson.M{"uid": "session-4"}) + require.NoError(tt, err) + assert.Equal(tt, int64(1), count) + + count, err = c. + Database("test"). + Collection("sessions_events"). + CountDocuments(ctx, bson.M{"session": "session-4"}) + require.NoError(tt, err) + assert.Equal(tt, int64(0), count) + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + + require.NoError(tt, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[94]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + + tc.verify(tt) + }) + } +} + +func TestMigration95Down(t *testing.T) { + ctx := context.Background() + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + mock.On("Get", "SHELLHUB_ENTERPRISE").Return("true").Twice() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "Success to revert migration 95 in cloud mode", + setup: func() error { + _, err := c. + Database("test"). + Collection("sessions"). + InsertOne(ctx, bson.M{ + "uid": "session-1", + "authenticated": true, + "events": bson.M{ + "types": bson.A{"pty-req", "window-change", "pty-output"}, + "seats": bson.A{int32(0)}, + }, + }) + if err != nil { + return err + } + + timestamp1 := "2023-01-01T10:00:00Z" + timestamp2 := "2023-01-01T10:01:00Z" + timestamp3 := "2023-01-01T10:02:00Z" + + _, err = c. + Database("test"). + Collection("sessions_events"). 
+ InsertMany(ctx, []any{ + bson.M{ + "session": "session-1", + "type": "pty-req", + "timestamp": timestamp1, + "data": bson.M{ + "term": "", + "columns": uint32(80), + "rows": uint32(24), + "width": uint32(0), + "height": uint32(0), + "modelist": []byte{}, + }, + "seat": int32(0), + }, + bson.M{ + "session": "session-1", + "type": "window-change", + "timestamp": timestamp2, + "data": bson.M{ + "columns": uint32(100), + "rows": uint32(30), + "width": uint32(0), + "height": uint32(0), + }, + "seat": int32(0), + }, + bson.M{ + "session": "session-1", + "type": "pty-output", + "timestamp": timestamp1, + "data": bson.M{ + "output": "initial output", + }, + "seat": int32(0), + }, + bson.M{ + "session": "session-1", + "type": "pty-output", + "timestamp": timestamp2, + "data": bson.M{ + "output": "resized terminal output", + }, + "seat": int32(0), + }, + bson.M{ + "session": "session-1", + "type": "pty-output", + "timestamp": timestamp3, + "data": bson.M{ + "output": "final output", + }, + "seat": int32(0), + }, + }) + + return err + }, + verify: func(tt *testing.T) { + query := c. + Database("test"). + Collection("sessions"). + FindOne(ctx, bson.M{"uid": "session-1"}) + + session := make(map[string]any) + require.NoError(tt, query.Decode(&session)) + + events, ok := session["events"].(map[string]any) + require.True(tt, ok) + + types, ok := events["types"].(bson.A) + require.True(tt, ok) + assert.NotContains(tt, types, "pty-req") + assert.NotContains(tt, types, "window-change") + assert.NotContains(tt, types, "pty-output") + + count, err := c. + Database("test"). + Collection("sessions_events"). + CountDocuments(ctx, bson.M{ + "session": "session-1", + "type": bson.M{ + "$in": []string{"pty-req", "window-change", "pty-output"}, + }, + }) + require.NoError(tt, err) + assert.Equal(tt, int64(0), count) + + cursor, err := c. + Database("test"). + Collection("recorded_sessions"). 
+ Find(ctx, bson.M{"uid": "session-1"}) + require.NoError(tt, err) + defer cursor.Close(ctx) + + var recordedSessions []map[string]any + require.NoError(tt, cursor.All(ctx, &recordedSessions)) + + assert.Equal(tt, 3, len(recordedSessions)) + + widthHeightCount := make(map[string]int) + for _, record := range recordedSessions { + key := fmt.Sprintf("%v-%v", record["width"], record["height"]) + widthHeightCount[key]++ + assert.Contains(tt, []string{ + "initial output", + "resized terminal output", + "final output", + }, record["message"]) + } + + assert.Equal(tt, 1, widthHeightCount["80-24"]) + assert.Equal(tt, 2, widthHeightCount["100-30"]) + }, + }, + { + description: "Skip migration revert when not in enterprise mode", + setup: func() error { + mock.On("Get", "SHELLHUB_ENTERPRISE").Return("false").Twice() + + _, err := c. + Database("test"). + Collection("sessions"). + InsertOne(ctx, bson.M{ + "uid": "session-3", + "authenticated": true, + "events": bson.M{ + "types": bson.A{"pty-req", "pty-output"}, + "seats": bson.A{int32(0)}, + }, + }) + if err != nil { + return err + } + + _, err = c. + Database("test"). + Collection("sessions_events"). + InsertOne(ctx, bson.M{ + "session": "session-3", + "type": "pty-output", + "timestamp": "2023-01-01T10:00:00Z", + "data": bson.M{ + "output": "test output", + }, + "seat": int32(0), + }) + + return err + }, + verify: func(tt *testing.T) { + query := c. + Database("test"). + Collection("sessions"). + FindOne(ctx, bson.M{"uid": "session-3"}) + + session := make(map[string]any) + require.NoError(tt, query.Decode(&session)) + + events, ok := session["events"].(map[string]any) + require.True(tt, ok) + + types, ok := events["types"].(bson.A) + require.True(tt, ok) + assert.Contains(tt, types, "pty-req") + assert.Contains(tt, types, "pty-output") + + count, err := c. + Database("test"). + Collection("sessions_events"). 
+ CountDocuments(ctx, bson.M{ + "session": "session-3", + "type": "pty-output", + }) + require.NoError(tt, err) + assert.Equal(tt, int64(1), count) + + count, err = c. + Database("test"). + Collection("recorded_sessions"). + CountDocuments(ctx, bson.M{"uid": "session-3"}) + require.NoError(tt, err) + assert.Equal(tt, int64(0), count) + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + + require.NoError(tt, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[94]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + + require.NoError(tt, migrates.Down(ctx, migrate.AllAvailable)) + + tc.verify(tt) + }) + } +} diff --git a/api/store/mongo/migrations/migration_96.go b/api/store/mongo/migrations/migration_96.go new file mode 100644 index 00000000000..e809d9ca3f6 --- /dev/null +++ b/api/store/mongo/migrations/migration_96.go @@ -0,0 +1,49 @@ +package migrations + +import ( + "context" + + "github.com/shellhub-io/shellhub/pkg/envs" + log "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration96 = migrate.Migration{ + Version: 96, + Description: "Drops the recorded_sessions collection", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{ + "component": "migration", + "version": 96, + "action": "Up", + }).Info("Applying migration") + + if !envs.IsEnterprise() { + return nil + } + + if err := db.Collection("recorded_sessions").Drop(ctx); err != nil { + return err + } + + return nil + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + log.WithFields(log.Fields{ + "component": "migration", + "version": 96, + "action": "Down", + }).Info("Reverting migration") + + if !envs.IsEnterprise() { + return nil + } + + if err := db.CreateCollection(ctx, 
"recorded_sessions"); err != nil { + return err + } + + return nil + }), +} diff --git a/api/store/mongo/migrations/migration_96_test.go b/api/store/mongo/migrations/migration_96_test.go new file mode 100644 index 00000000000..7557e9165e2 --- /dev/null +++ b/api/store/mongo/migrations/migration_96_test.go @@ -0,0 +1,261 @@ +package migrations + +import ( + "context" + "slices" + "testing" + + "github.com/shellhub-io/shellhub/pkg/envs" + envmock "github.com/shellhub-io/shellhub/pkg/envs/mocks" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration96Up(t *testing.T) { + ctx := context.Background() + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "Drop recorded_sessions collection in enterprise mode", + setup: func() error { + mock.On("Get", "SHELLHUB_ENTERPRISE").Return("true").Once() + + _, err := c. + Database("test"). + Collection("sessions"). + InsertOne(ctx, bson.M{ + "uid": "session-1", + "authenticated": true, + }) + if err != nil { + return err + } + + _, err = c. + Database("test"). + Collection("recorded_sessions"). + InsertMany(ctx, []any{ + bson.M{ + "uid": "session-1", + "message": "test output 1", + "time": "2023-01-01T10:00:00Z", + "width": 80, + "height": 24, + }, + bson.M{ + "uid": "session-1", + "message": "test output 2", + "time": "2023-01-01T10:01:00Z", + "width": 80, + "height": 24, + }, + }) + + return err + }, + verify: func(tt *testing.T) { + collections, err := c.Database("test").ListCollectionNames(ctx, bson.M{}) + require.NoError(tt, err) + + hasRecordedSessions := slices.Contains(collections, "recorded_sessions") + + if hasRecordedSessions { + count, err := c. + Database("test"). + Collection("recorded_sessions"). 
+ CountDocuments(ctx, bson.M{}) + require.NoError(tt, err) + assert.Equal(tt, int64(0), count) + } + + count, err := c. + Database("test"). + Collection("sessions"). + CountDocuments(ctx, bson.M{}) + require.NoError(tt, err) + assert.Equal(tt, int64(1), count) + }, + }, + { + description: "Nothing happens when not in enterprise mode", + setup: func() error { + mock.On("Get", "SHELLHUB_ENTERPRISE").Return("false").Once() + + _, err := c. + Database("test"). + Collection("sessions"). + InsertOne(ctx, bson.M{ + "uid": "session-2", + "authenticated": true, + }) + if err != nil { + return err + } + + _, err = c. + Database("test"). + Collection("recorded_sessions"). + InsertOne(ctx, bson.M{ + "uid": "session-2", + "message": "test output", + "time": "2023-01-01T10:00:00Z", + "width": 80, + "height": 24, + }) + + return err + }, + verify: func(tt *testing.T) { + count, err := c. + Database("test"). + Collection("recorded_sessions"). + CountDocuments(ctx, bson.M{"uid": "session-2"}) + require.NoError(tt, err) + assert.Equal(tt, int64(1), count) + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + + require.NoError(tt, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[94]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + + tc.verify(tt) + }) + } +} + +func TestMigration96Down(t *testing.T) { + ctx := context.Background() + mock := &envmock.Backend{} + envs.DefaultBackend = mock + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "Migration down has no effect", + setup: func() error { + mock.On("Get", "SHELLHUB_ENTERPRISE").Return("true").Once() + + _, err := c. + Database("test"). + Collection("sessions"). 
+ InsertOne(ctx, bson.M{ + "uid": "session-3", + "authenticated": true, + }) + if err != nil { + return err + } + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[94]) + if err := migrates.Up(ctx, migrate.AllAvailable); err != nil { + return err + } + + return nil + }, + verify: func(tt *testing.T) { + collections, err := c.Database("test").ListCollectionNames(ctx, bson.M{}) + require.NoError(tt, err) + + hasRecordedSessions := slices.Contains(collections, "recorded_sessions") + + if hasRecordedSessions { + count, err := c. + Database("test"). + Collection("recorded_sessions"). + CountDocuments(ctx, bson.M{}) + require.NoError(tt, err) + assert.Equal(tt, int64(0), count) + } + + count, err := c. + Database("test"). + Collection("sessions"). + CountDocuments(ctx, bson.M{}) + require.NoError(tt, err) + assert.Equal(tt, int64(1), count) + }, + }, + { + description: "Nothing happens when not in enterprise mode", + setup: func() error { + mock.On("Get", "SHELLHUB_ENTERPRISE").Return("false").Twice() + + _, err := c. + Database("test"). + Collection("sessions"). + InsertOne(ctx, bson.M{ + "uid": "session-4", + "authenticated": true, + }) + if err != nil { + return err + } + + _, err = c. + Database("test"). + Collection("recorded_sessions"). + InsertOne(ctx, bson.M{ + "uid": "session-4", + "message": "test output", + "time": "2023-01-01T10:00:00Z", + "width": 80, + "height": 24, + }) + if err != nil { + return err + } + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[95]) + if err := migrates.Up(ctx, migrate.AllAvailable); err != nil { + return err + } + + return nil + }, + verify: func(tt *testing.T) { + count, err := c. + Database("test"). + Collection("recorded_sessions"). 
+ CountDocuments(ctx, bson.M{"uid": "session-4"}) + require.NoError(tt, err) + assert.Equal(tt, int64(1), count) + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + + require.NoError(tt, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[95]) + require.NoError(tt, migrates.Down(ctx, migrate.AllAvailable)) + + tc.verify(tt) + }) + } +} diff --git a/api/store/mongo/migrations/migration_97.go b/api/store/mongo/migrations/migration_97.go new file mode 100644 index 00000000000..7c68c555642 --- /dev/null +++ b/api/store/mongo/migrations/migration_97.go @@ -0,0 +1,46 @@ +package migrations + +import ( + "context" + + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration97 = migrate.Migration{ + Version: 97, + Description: "Set namespace type to personal when type is empty", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 97, + "action": "Up", + }).Info("Applying migration") + + if _, err := db. + Collection("namespaces"). 
+ UpdateMany(ctx, bson.M{ + "type": "", + }, bson.M{ + "$set": bson.M{ + "type": models.TypePersonal, + }, + }); err != nil { + return err + } + + return nil + }), + Down: migrate.MigrationFunc(func(_ context.Context, _ *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 97, + "action": "Down", + }).Info("Cannot undo migration") + + return nil + }), +} diff --git a/api/store/mongo/migrations/migration_97_test.go b/api/store/mongo/migrations/migration_97_test.go new file mode 100644 index 00000000000..35580a2dd33 --- /dev/null +++ b/api/store/mongo/migrations/migration_97_test.go @@ -0,0 +1,150 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration97Up(t *testing.T) { + ctx := context.Background() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "Session with empty type should be updated to personal", + setup: func() error { + _, err := c. + Database("test"). + Collection("namespaces"). + InsertOne(ctx, bson.M{ + "tenant_id": "empty-type-test", + "type": "", + }) + + return err + }, + verify: func(tt *testing.T) { + query := c. + Database("test"). + Collection("namespaces"). + FindOne(ctx, bson.M{"tenant_id": "empty-type-test"}) + namespace := make(map[string]any) + require.NoError(tt, query.Decode(&namespace)) + + assert.Equal(tt, "personal", namespace["type"], + "Session type should be updated to 'personal' when originally empty") + }, + }, + { + description: "Session with non-empty type should remain unchanged", + setup: func() error { + _, err := c. + Database("test"). + Collection("namespaces"). + InsertOne(ctx, bson.M{ + "tenant_id": "existing-type-test", + "type": "existing", + }) + + return err + }, + verify: func(tt *testing.T) { + query := c. 
+ Database("test"). + Collection("namespaces"). + FindOne(ctx, bson.M{"tenant_id": "existing-type-test"}) + namespace := make(map[string]any) + require.NoError(tt, query.Decode(&namespace)) + + assert.Equal(tt, "existing", namespace["type"], + "Session type should remain unchanged when not empty") + }, + }, + { + description: "Multiple namespaces with empty type should be updated", + setup: func() error { + _, err := c. + Database("test"). + Collection("namespaces"). + InsertMany(ctx, []any{ + bson.M{ + "tenant_id": "multi-empty-1", + "type": "", + }, + bson.M{ + "tenant_id": "multi-empty-2", + "type": "", + }, + bson.M{ + "tenant_id": "multi-existing", + "type": "existing", + }, + }) + + return err + }, + verify: func(tt *testing.T) { + query := c. + Database("test"). + Collection("namespaces"). + FindOne(ctx, bson.M{"tenant_id": "multi-empty-1"}) + namespace1 := make(map[string]any) + require.NoError(tt, query.Decode(&namespace1)) + assert.Equal(tt, "personal", namespace1["type"]) + + query = c. + Database("test"). + Collection("namespaces"). + FindOne(ctx, bson.M{"tenant_id": "multi-empty-2"}) + namespace2 := make(map[string]any) + require.NoError(tt, query.Decode(&namespace2)) + assert.Equal(tt, "personal", namespace2["type"]) + + query = c. + Database("test"). + Collection("namespaces"). + FindOne(ctx, bson.M{"tenant_id": "multi-existing"}) + namespaceExisting := make(map[string]any) + require.NoError(tt, query.Decode(&namespaceExisting)) + assert.Equal(tt, "existing", namespaceExisting["type"]) + }, + }, + { + description: "No namespaces with empty type should handle gracefully", + setup: func() error { + return nil + }, + verify: func(tt *testing.T) { + count, err := c. + Database("test"). + Collection("namespaces"). 
+ CountDocuments(ctx, bson.M{"type": ""}) + require.NoError(tt, err) + assert.Equal(tt, int64(0), count) + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + + require.NoError(tt, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[96]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + + tc.verify(tt) + }) + } +} diff --git a/api/store/mongo/migrations/migration_98.go b/api/store/mongo/migrations/migration_98.go new file mode 100644 index 00000000000..459cf8daa9a --- /dev/null +++ b/api/store/mongo/migrations/migration_98.go @@ -0,0 +1,37 @@ +package migrations + +import ( + "context" + + "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration98 = migrate.Migration{ + Version: 98, + Description: "Convert the username to nil when it's a blank string", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 98, + "action": "Up", + }).Info("Applying migration") + + _, err := db.Collection("users").UpdateMany(ctx, bson.M{"username": ""}, bson.M{"$set": bson.M{"username": nil}}) + + return err + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 98, + "action": "Down", + }).Info("Reverting migration") + + _, err := db.Collection("users").UpdateMany(ctx, bson.M{"username": nil}, bson.M{"$set": bson.M{"username": ""}}) + + return err + }), +} diff --git a/api/store/mongo/migrations/migration_98_test.go b/api/store/mongo/migrations/migration_98_test.go new file mode 100644 index 00000000000..d30e2ab51e9 --- /dev/null +++ b/api/store/mongo/migrations/migration_98_test.go @@ -0,0 +1,102 @@ +package 
migrations + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration98Up(t *testing.T) { + ctx := context.Background() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "succeeds", + setup: func() error { + _, err := c. + Database("test"). + Collection("users"). + InsertOne(ctx, bson.M{"email": "john.doe@test.com", "username": ""}) + + return err + }, + verify: func(tt *testing.T) { + user := make(map[string]any) + require.NoError(tt, c.Database("test").Collection("users").FindOne(ctx, bson.M{"email": "john.doe@test.com"}).Decode(&user)) + + username, ok := user["username"] + require.Equal(tt, true, ok) + require.Equal(tt, nil, username) + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + + require.NoError(tt, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[97]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + + tc.verify(tt) + }) + } +} + +func TestMigration98Down(t *testing.T) { + ctx := context.Background() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "succeeds", + setup: func() error { + _, err := c. + Database("test"). + Collection("users"). 
+ InsertOne(ctx, bson.M{"email": "john.doe@test.com", "username": nil}) + + return err + }, + verify: func(tt *testing.T) { + user := make(map[string]any) + require.NoError(tt, c.Database("test").Collection("users").FindOne(ctx, bson.M{"email": "john.doe@test.com"}).Decode(&user)) + + username, ok := user["username"] + require.Equal(tt, true, ok) + require.Equal(tt, "", username) + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + + require.NoError(tt, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[97]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + require.NoError(tt, migrates.Down(ctx, migrate.AllAvailable)) + + tc.verify(tt) + }) + } +} diff --git a/api/store/mongo/migrations/migration_99.go b/api/store/mongo/migrations/migration_99.go new file mode 100644 index 00000000000..d46671870d3 --- /dev/null +++ b/api/store/mongo/migrations/migration_99.go @@ -0,0 +1,45 @@ +package migrations + +import ( + "context" + + "github.com/sirupsen/logrus" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +var migration99 = migrate.Migration{ + Version: 99, + Description: "Update session.recorded to false when session has no events", + Up: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 99, + "action": "Up", + }).Info("Applying migration") + + _, err := db.Collection("sessions"). 
+ UpdateMany( + ctx, + bson.M{ + "recorded": true, + "events.types": bson.M{"$size": 0}, + }, + bson.M{"$set": bson.M{"recorded": false}}, + ) + + return err + }), + Down: migrate.MigrationFunc(func(ctx context.Context, db *mongo.Database) error { + logrus.WithFields(logrus.Fields{ + "component": "migration", + "version": 99, + "action": "Down", + }).Info("Reverting migration") + + // NOTE: This migration shouldn't be reverted. + + return nil + }), +} diff --git a/api/store/mongo/migrations/migration_99_test.go b/api/store/mongo/migrations/migration_99_test.go new file mode 100644 index 00000000000..1b462eb18f4 --- /dev/null +++ b/api/store/mongo/migrations/migration_99_test.go @@ -0,0 +1,86 @@ +package migrations + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" +) + +func TestMigration99Up(t *testing.T) { + ctx := context.Background() + + cases := []struct { + description string + setup func() error + verify func(tt *testing.T) + }{ + { + description: "mark recorded to false for sessions without event types", + setup: func() error { + if _, err := c. + Database("test"). + Collection("sessions"). + InsertOne(ctx, bson.M{ + "uid": "first", + "recorded": true, + "events": bson.M{"types": []any{}}, + }); err != nil { + return err + } + + if _, err := c. + Database("test"). + Collection("sessions"). 
+ InsertOne(ctx, bson.M{ + "uid": "second", + "recorded": true, + "events": bson.M{"types": []any{ + "shell", + }}, + }); err != nil { + return err + } + + return nil + }, + verify: func(tt *testing.T) { + sessions := []map[string]any{} + + cursor, err := c.Database("test").Collection("sessions").Find(ctx, bson.M{}) + require.NoError(tt, err) + + ctx := context.Background() + + err = cursor.All(ctx, &sessions) + require.NoError(tt, err) + + recorded, ok := sessions[0]["recorded"] + require.True(tt, ok) + require.Equal(tt, false, recorded) + + recorded, ok = sessions[1]["recorded"] + require.True(tt, ok) + require.Equal(tt, true, recorded) + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + + require.NoError(tt, tc.setup()) + + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[98]) + require.NoError(tt, migrates.Up(ctx, migrate.AllAvailable)) + + tc.verify(tt) + }) + } +} diff --git a/api/store/mongo/migrations/migration_9_test.go b/api/store/mongo/migrations/migration_9_test.go index 958adc0d829..c00d84a0259 100644 --- a/api/store/mongo/migrations/migration_9_test.go +++ b/api/store/mongo/migrations/migration_9_test.go @@ -4,35 +4,32 @@ import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" migrate "github.com/xakep666/mongo-migrate" "go.mongodb.org/mongo-driver/bson" ) func TestMigration9(t *testing.T) { - logrus.Info("Testing Migration 9 - Test if the device's name is in lowercase") + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) - db := dbtest.DBServer{} - defer db.Stop() - - migrates := migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:8]...) - err := migrates.Up(migrate.AllAvailable) + migrates := migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:8]...) 
+ err := migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) device := models.Device{ Name: "Test", } - _, err = db.Client().Database("test").Collection("devices").InsertOne(context.TODO(), device) + _, err = c.Database("test").Collection("devices").InsertOne(context.TODO(), device) assert.NoError(t, err) - migrates = migrate.NewMigrate(db.Client().Database("test"), GenerateMigrations()[:9]...) - err = migrates.Up(migrate.AllAvailable) + migrates = migrate.NewMigrate(c.Database("test"), GenerateMigrations()[:9]...) + err = migrates.Up(context.Background(), migrate.AllAvailable) assert.NoError(t, err) - err = db.Client().Database("test").Collection("devices").FindOne(context.TODO(), bson.M{"name": "test"}).Decode(&device) + err = c.Database("test").Collection("devices").FindOne(context.TODO(), bson.M{"name": "test"}).Decode(&device) assert.NoError(t, err) } diff --git a/api/store/mongo/namespace.go b/api/store/mongo/namespace.go index 95e33db303d..8281e6aca81 100644 --- a/api/store/mongo/namespace.go +++ b/api/store/mongo/namespace.go @@ -2,69 +2,25 @@ package mongo import ( "context" + "fmt" "strings" "time" "github.com/shellhub-io/shellhub/api/pkg/gateway" "github.com/shellhub-io/shellhub/api/store" - "github.com/shellhub-io/shellhub/api/store/mongo/queries" - "github.com/shellhub-io/shellhub/pkg/api/paginator" + "github.com/shellhub-io/shellhub/pkg/clock" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" + log "github.com/sirupsen/logrus" "go.mongodb.org/mongo-driver/bson" - "go.mongodb.org/mongo-driver/bson/primitive" "go.mongodb.org/mongo-driver/mongo" ) -func (s *Store) NamespaceList(ctx context.Context, pagination paginator.Query, filters []models.Filter, export bool) ([]models.Namespace, int, error) { +func (s *Store) NamespaceList(ctx context.Context, opts ...store.QueryOption) ([]models.Namespace, int, error) { query := []bson.M{} - queryMatch, err := queries.BuildFilterQuery(filters) - if err != nil { - 
return nil, 0, FromMongoError(err) - } - - if len(queryMatch) > 0 { - query = append(query, queryMatch...) - } - - if export { - query = []bson.M{ - { - "$lookup": bson.M{ - "from": "devices", - "localField": "tenant_id", - "foreignField": "tenant_id", - "as": "devices", - }, - }, - { - "$addFields": bson.M{ - "devices": bson.M{"$size": "$devices"}, - }, - }, - { - "$lookup": bson.M{ - "from": "sessions", - "localField": "devices.uid", - "foreignField": "device_uid", - "as": "sessions", - }, - }, - { - "$addFields": bson.M{ - "sessions": bson.M{"$size": "$sessions"}, - }, - }, - } - } - - if len(queryMatch) > 0 { - query = append(query, queryMatch...) - } // Only match for the respective tenant if requested if id := gateway.IDFromContext(ctx); id != nil { - user, _, err := s.UserGetByID(ctx, id.ID, false) + user, err := s.UserResolve(ctx, store.UserIDResolver, id.ID) if err != nil { return nil, 0, err } @@ -80,15 +36,112 @@ func (s *Store) NamespaceList(ctx context.Context, pagination paginator.Query, f }) } - queryCount := query - queryCount = append(queryCount, bson.M{"$count": "count"}) - count, err := AggregateCount(ctx, s.db.Collection("namespaces"), queryCount) - if err != nil { - return nil, 0, err + query = append(query, + bson.M{ + "$addFields": bson.M{ + "members": bson.M{ + "$map": bson.M{ + "input": "$members", + "as": "member", + "in": bson.M{ + "$mergeObjects": bson.A{ + "$$member", + bson.M{ + "id": bson.M{ + "$toObjectId": "$$member.id", + }, + }, + }, + }, + }, + }, + }, + }, + bson.M{ + "$lookup": bson.M{ + "from": "users", + "localField": "members.id", + "foreignField": "_id", + "as": "userDetails", + }, + }, + bson.M{ + "$lookup": bson.M{ + "from": "user_invitations", + "localField": "members.id", + "foreignField": "_id", + "as": "invitationDetails", + }, + }, + bson.M{ + "$addFields": bson.M{ + "members": bson.M{ + "$map": bson.M{ + "input": "$members", + "as": "member", + "in": bson.M{ + "$let": bson.M{ + "vars": bson.M{ + "userDoc": bson.M{ + 
"$arrayElemAt": bson.A{ + bson.M{ + "$filter": bson.M{ + "input": "$userDetails", + "cond": bson.M{ + "$eq": bson.A{"$$this._id", "$$member.id"}, + }, + }, + }, + 0, + }, + }, + "inviteDoc": bson.M{ + "$arrayElemAt": bson.A{ + bson.M{ + "$filter": bson.M{ + "input": "$invitationDetails", + "cond": bson.M{ + "$eq": bson.A{"$$this._id", "$$member.id"}, + }, + }, + }, + 0, + }, + }, + }, + "in": bson.M{ + "$mergeObjects": bson.A{ + "$$member", + bson.M{ + "email": bson.M{ + "$ifNull": bson.A{ + "$$userDoc.email", + "$$inviteDoc.email", + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + bson.M{ + "$unset": bson.A{"userDetails", "invitationDetails"}, + }, + ) + + for _, opt := range opts { + if err := opt(context.WithValue(ctx, "query", &query)); err != nil { + return nil, 0, err + } } - if pagination.Page != 0 && pagination.PerPage != 0 { - query = append(query, queries.BuildPaginationQuery(pagination)...) + count, err := CountAllMatchingDocuments(ctx, s.db.Collection("namespaces"), query) + if err != nil { + return nil, 0, err } namespaces := make([]models.Namespace, 0) @@ -100,275 +153,300 @@ func (s *Store) NamespaceList(ctx context.Context, pagination paginator.Query, f for cursor.Next(ctx) { namespace := new(models.Namespace) - err = cursor.Decode(&namespace) - if err != nil { + if err := cursor.Decode(namespace); err != nil { return namespaces, count, err } - countDevice, err := s.db.Collection("devices").CountDocuments(ctx, bson.M{"tenant_id": namespace.TenantID, "status": "accepted"}) - if err != nil { - return namespaces, 0, err - } - - namespace.DevicesCount = int(countDevice) - namespaces = append(namespaces, *namespace) } return namespaces, count, err } -func (s *Store) NamespaceGet(ctx context.Context, tenantID string) (*models.Namespace, error) { - var ns *models.Namespace - - if err := s.cache.Get(ctx, strings.Join([]string{"namespace", tenantID}, "/"), &ns); err != nil { - logrus.Error(err) +func (s *Store) NamespaceResolve(ctx context.Context, 
resolver store.NamespaceResolver, value string) (*models.Namespace, error) { + namespace := new(models.Namespace) + if _ = s.cache.Get(ctx, "namespace"+"/"+value, namespace); namespace != nil && namespace.TenantID != "" { + return namespace, nil } - if ns != nil { - goto count + matchStage := bson.M{} + switch resolver { + case store.NamespaceTenantIDResolver: + matchStage["tenant_id"] = value + case store.NamespaceNameResolver: + matchStage["name"] = value } - if err := s.db.Collection("namespaces").FindOne(ctx, bson.M{"tenant_id": tenantID}).Decode(&ns); err != nil { - return ns, FromMongoError(err) + query := []bson.M{ + { + "$match": matchStage, + }, + { + "$addFields": bson.M{ + "members": bson.M{ + "$map": bson.M{ + "input": "$members", + "as": "member", + "in": bson.M{ + "$mergeObjects": bson.A{ + "$$member", + bson.M{ + "id": bson.M{ + "$toObjectId": "$$member.id", + }, + }, + }, + }, + }, + }, + }, + }, + { + "$lookup": bson.M{ + "from": "users", + "localField": "members.id", + "foreignField": "_id", + "as": "userDetails", + }, + }, + { + "$lookup": bson.M{ + "from": "user_invitations", + "localField": "members.id", + "foreignField": "_id", + "as": "invitationDetails", + }, + }, + { + "$addFields": bson.M{ + "members": bson.M{ + "$map": bson.M{ + "input": "$members", + "as": "member", + "in": bson.M{ + "$let": bson.M{ + "vars": bson.M{ + "userDoc": bson.M{ + "$arrayElemAt": bson.A{ + bson.M{ + "$filter": bson.M{ + "input": "$userDetails", + "cond": bson.M{ + "$eq": bson.A{"$$this._id", "$$member.id"}, + }, + }, + }, + 0, + }, + }, + "inviteDoc": bson.M{ + "$arrayElemAt": bson.A{ + bson.M{ + "$filter": bson.M{ + "input": "$invitationDetails", + "cond": bson.M{ + "$eq": bson.A{"$$this._id", "$$member.id"}, + }, + }, + }, + 0, + }, + }, + }, + "in": bson.M{ + "$mergeObjects": bson.A{ + "$$member", + bson.M{ + "email": bson.M{ + "$ifNull": bson.A{ + "$$userDoc.email", + "$$inviteDoc.email", + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + { + "$unset": 
bson.A{"userDetails", "invitationDetails"}, + }, } - if err := s.cache.Set(ctx, strings.Join([]string{"namespace", tenantID}, "/"), ns, time.Minute); err != nil { - logrus.Error(err) + cursor, err := s.db.Collection("namespaces").Aggregate(ctx, query) + if err != nil { + return nil, FromMongoError(err) } + defer cursor.Close(ctx) -count: - countDevice, err := s.db.Collection("devices").CountDocuments(ctx, bson.M{"tenant_id": tenantID, "status": "accepted"}) - if err != nil { + cursor.Next(ctx) + + namespace = nil + if err := cursor.Decode(&namespace); err != nil { return nil, FromMongoError(err) } - ns.DevicesCount = int(countDevice) + if err := s.cache.Set(ctx, "namespace"+"/"+value, namespace, time.Minute); err != nil { + log.Error(err) + } - return ns, nil + return namespace, nil } -func (s *Store) NamespaceGetByName(ctx context.Context, name string) (*models.Namespace, error) { - var ns *models.Namespace - - if err := s.cache.Get(ctx, strings.Join([]string{"namespace", name}, "/"), &ns); err != nil { - logrus.Error(err) - } +func (s *Store) NamespaceGetPreferred(ctx context.Context, userID string) (*models.Namespace, error) { + filter := bson.M{"members.id": userID} - if ns != nil { - return ns, nil + if user, _ := s.UserResolve(ctx, store.UserIDResolver, userID); user != nil { + if user.Preferences.PreferredNamespace != "" { + filter["tenant_id"] = user.Preferences.PreferredNamespace + } } - if err := s.db.Collection("namespaces").FindOne(ctx, bson.M{"name": name}).Decode(&ns); err != nil { + ns := new(models.Namespace) + if err := s.db.Collection("namespaces").FindOne(ctx, filter).Decode(ns); err != nil { return nil, FromMongoError(err) } return ns, nil } -func (s *Store) NamespaceCreate(ctx context.Context, namespace *models.Namespace) (*models.Namespace, error) { - session, err := s.db.Client().StartSession() - if err != nil { - return nil, err +func (s *Store) NamespaceCreate(ctx context.Context, namespace *models.Namespace) (string, error) { + 
namespace.CreatedAt = clock.Now() + if _, err := s.db.Collection("namespaces").InsertOne(ctx, namespace); err != nil { + return "", err } - defer session.EndSession(ctx) - - if _, err := session.WithTransaction(ctx, func(sessCtx mongo.SessionContext) (interface{}, error) { - _, err := s.db.Collection("namespaces").InsertOne(sessCtx, namespace) - if err != nil { - return nil, err - } - objID, err := primitive.ObjectIDFromHex(namespace.Owner) - if err != nil { - return nil, FromMongoError(err) - } - - if _, err := s.db.Collection("users").UpdateOne(sessCtx, bson.M{"_id": objID}, bson.M{"$inc": bson.M{"namespaces": 1}}); err != nil { - return nil, FromMongoError(err) - } + return namespace.TenantID, nil +} - return nil, nil - }); err != nil { - return nil, err +func (s *Store) NamespaceConflicts(ctx context.Context, target *models.NamespaceConflicts) ([]string, bool, error) { + pipeline := []bson.M{ + { + "$match": bson.M{ + "$or": []bson.M{ + {"name": target.Name}, + }, + }, + }, } - return namespace, err -} - -func (s *Store) NamespaceDelete(ctx context.Context, tenantID string) error { - session, err := s.db.Client().StartSession() + cursor, err := s.db.Collection("namespaces").Aggregate(ctx, pipeline) if err != nil { - return err + return nil, false, FromMongoError(err) } - defer session.EndSession(ctx) - - if _, err := session.WithTransaction(ctx, func(sessCtx mongo.SessionContext) (interface{}, error) { - ns, err := s.NamespaceGet(ctx, tenantID) - if err != nil { - return nil, err - } - - if _, err := s.db.Collection("namespaces").DeleteOne(sessCtx, bson.M{"tenant_id": tenantID}); err != nil { - return nil, FromMongoError(err) - } - - if err := s.cache.Delete(ctx, strings.Join([]string{"namespace", tenantID}, "/")); err != nil { - logrus.Error(err) - } - - collections := []string{"devices", "sessions", "connected_devices", "firewall_rules", "public_keys", "recorded_sessions"} - for _, collection := range collections { - if _, err := 
s.db.Collection(collection).DeleteMany(sessCtx, bson.M{"tenant_id": tenantID}); err != nil { - return nil, FromMongoError(err) - } - } + defer cursor.Close(ctx) - objID, err := primitive.ObjectIDFromHex(ns.Owner) - if err != nil { - return nil, FromMongoError(err) + namespace := new(models.NamespaceConflicts) + conflicts := make([]string, 0) + for cursor.Next(ctx) { + if err := cursor.Decode(&namespace); err != nil { + return nil, false, FromMongoError(err) } - if _, err := s.db.Collection("users").UpdateOne(sessCtx, bson.M{"_id": objID}, bson.M{"$inc": bson.M{"namespaces": -1}}); err != nil { - return nil, FromMongoError(err) + if namespace.Name == target.Name { + conflicts = append(conflicts, "name") } - - return nil, nil - }); err != nil { - return err - } - - return nil -} - -func (s *Store) NamespaceRename(ctx context.Context, tenantID string, name string) (*models.Namespace, error) { - if _, err := s.db.Collection("namespaces").UpdateOne(ctx, bson.M{"tenant_id": tenantID}, bson.M{"$set": bson.M{"name": name}}); err != nil { - return nil, FromMongoError(err) - } - - if err := s.cache.Delete(ctx, strings.Join([]string{"namespace", tenantID}, "/")); err != nil { - logrus.Error(err) } - return s.NamespaceGet(ctx, tenantID) + return conflicts, len(conflicts) > 0, nil } -func (s *Store) NamespaceUpdate(ctx context.Context, tenantID string, namespace *models.Namespace) error { - ns, err := s.db.Collection("namespaces").UpdateOne( - ctx, - bson.M{ - "tenant_id": tenantID, - }, - bson.M{ - "$set": bson.M{ - "name": namespace.Name, - "max_devices": namespace.MaxDevices, - "settings.session_record": namespace.Settings.SessionRecord, - }, - }, - ) +func (s *Store) NamespaceUpdate(ctx context.Context, namespace *models.Namespace) error { + res, err := s.db. + Collection("namespaces"). 
+ UpdateOne(ctx, bson.M{"tenant_id": namespace.TenantID}, bson.M{"$set": namespace}) if err != nil { return FromMongoError(err) } - if ns.MatchedCount < 1 { + if res.MatchedCount < 1 { return store.ErrNoDocuments } - if err := s.cache.Delete(ctx, strings.Join([]string{"namespace", tenantID}, "/")); err != nil { - logrus.Error(err) + if err := s.cache.Delete(ctx, strings.Join([]string{"namespace", namespace.TenantID}, "/")); err != nil { + log.Error(err) } return nil } -func (s *Store) NamespaceAddMember(ctx context.Context, tenantID string, memberID string, memberRole string) (*models.Namespace, error) { - result := s.db.Collection("namespaces").FindOne(ctx, bson.M{"tenant_id": tenantID, "members": bson.M{"$elemMatch": bson.M{"id": memberID}}}) - if result.Err() == nil { - return nil, ErrNamespaceDuplicatedMember - } - - _, err := s.db.Collection("namespaces").UpdateOne(ctx, bson.M{"tenant_id": tenantID}, bson.M{"$addToSet": bson.M{"members": bson.M{"id": memberID, "role": memberRole}}}) - if err != nil { - return nil, FromMongoError(err) - } - - if err := s.cache.Delete(ctx, strings.Join([]string{"namespace", tenantID}, "/")); err != nil { - logrus.Error(err) +func (s *Store) NamespaceDelete(ctx context.Context, namespace *models.Namespace) error { + deletedCount, err := s.NamespaceDeleteMany(ctx, []string{namespace.TenantID}) + switch { + case err != nil: + return err + case deletedCount < 1: + return store.ErrNoDocuments + default: + return nil } - - return s.NamespaceGet(ctx, tenantID) } -func (s *Store) NamespaceRemoveMember(ctx context.Context, tenantID string, memberID string) (*models.Namespace, error) { - ns, err := s.db.Collection("namespaces").UpdateOne(ctx, bson.M{"tenant_id": tenantID}, bson.M{"$pull": bson.M{"members": bson.M{"id": memberID}}}) +func (s *Store) NamespaceDeleteMany(ctx context.Context, tenantIDs []string) (int64, error) { + mongoSession, err := s.db.Client().StartSession() if err != nil { - return nil, FromMongoError(err) + return 0, 
FromMongoError(err) } - switch { - // tenant not found - case ns.MatchedCount < 1: - return nil, store.ErrNoDocuments - // member not found - case ns.ModifiedCount < 1: - return nil, ErrUserNotFound - } + defer mongoSession.EndSession(ctx) - if err := s.cache.Delete(ctx, strings.Join([]string{"namespace", tenantID}, "/")); err != nil { - logrus.Error(err) - } + fn := func(sessCtx mongo.SessionContext) (any, error) { + r, err := s.db.Collection("namespaces").DeleteMany(sessCtx, bson.M{"tenant_id": bson.M{"$in": tenantIDs}}) + if err != nil { + return 0, FromMongoError(err) + } - return s.NamespaceGet(ctx, tenantID) -} + for _, tenantID := range tenantIDs { + if err := s.cache.Delete(sessCtx, strings.Join([]string{"namespace", tenantID}, "/")); err != nil { + log.Error(err) + } + } -func (s *Store) NamespaceEditMember(ctx context.Context, tenantID string, memberID string, memberNewRole string) error { - ns, err := s.db.Collection("namespaces").UpdateOne(ctx, bson.M{"tenant_id": tenantID, "members.id": memberID}, bson.M{"$set": bson.M{"members.$.role": memberNewRole}}) - if err != nil { - return FromMongoError(err) - } + collections := []string{"devices", "sessions", "firewall_rules", "public_keys", "recorded_sessions", "api_keys", "tunnels"} + for _, collection := range collections { + if _, err := s.db.Collection(collection).DeleteMany(sessCtx, bson.M{"tenant_id": bson.M{"$in": tenantIDs}}); err != nil { + return 0, FromMongoError(err) + } + } - if ns.MatchedCount < 1 { - return ErrUserNotFound - } + _, err = s.db. + Collection("users"). 
+ UpdateMany(ctx, bson.M{"preferred_namespace": bson.M{"$in": tenantIDs}}, bson.M{"$set": bson.M{"preferred_namespace": ""}}) + if err != nil { + return 0, FromMongoError(err) + } - if err := s.cache.Delete(ctx, strings.Join([]string{"namespace", tenantID}, "/")); err != nil { - logrus.Error(err) + return r.DeletedCount, nil } - return nil + deletedCount, err := mongoSession.WithTransaction(ctx, fn) + + return deletedCount.(int64), err } -func (s *Store) NamespaceGetFirst(ctx context.Context, id string) (*models.Namespace, error) { - ns := new(models.Namespace) - if err := s.db.Collection("namespaces").FindOne(ctx, bson.M{"members": bson.M{"$elemMatch": bson.M{"id": id}}}).Decode(&ns); err != nil { - return nil, FromMongoError(err) +func (s *Store) NamespaceIncrementDeviceCount(ctx context.Context, tenantID string, status models.DeviceStatus, count int64) error { + update := bson.M{ + "$inc": bson.M{ + fmt.Sprintf("devices_%s_count", string(status)): count, + }, } - return ns, nil -} - -func (s *Store) NamespaceSetSessionRecord(ctx context.Context, sessionRecord bool, tenantID string) error { - ns, err := s.db.Collection("namespaces").UpdateOne(ctx, bson.M{"tenant_id": tenantID}, bson.M{"$set": bson.M{"settings.session_record": sessionRecord}}) + r, err := s.db.Collection("namespaces").UpdateOne(ctx, bson.M{"tenant_id": tenantID}, update) if err != nil { return FromMongoError(err) } - if ns.MatchedCount < 1 { + if r.MatchedCount == 0 { return store.ErrNoDocuments } - return nil -} - -func (s *Store) NamespaceGetSessionRecord(ctx context.Context, tenantID string) (bool, error) { - var settings struct { - Settings *models.NamespaceSettings `json:"settings" bson:"settings"` - } - - if err := s.db.Collection("namespaces").FindOne(ctx, bson.M{"tenant_id": tenantID}).Decode(&settings); err != nil { - return false, FromMongoError(err) + if err := s.cache.Delete(ctx, strings.Join([]string{"namespace", tenantID}, "/")); err != nil { + log.Error(err) } - return 
settings.Settings.SessionRecord, nil + return nil } diff --git a/api/store/mongo/namespace_test.go b/api/store/mongo/namespace_test.go index 9739c3220f4..6a5266385d2 100644 --- a/api/store/mongo/namespace_test.go +++ b/api/store/mongo/namespace_test.go @@ -1,4 +1,4 @@ -package mongo +package mongo_test import ( "context" @@ -6,14 +6,15 @@ import ( "testing" "time" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" - "github.com/shellhub-io/shellhub/api/pkg/fixtures" - "github.com/shellhub-io/shellhub/api/pkg/guard" "github.com/shellhub-io/shellhub/api/store" - "github.com/shellhub-io/shellhub/pkg/api/paginator" - "github.com/shellhub-io/shellhub/pkg/cache" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" + "github.com/shellhub-io/shellhub/pkg/api/query" + "github.com/shellhub-io/shellhub/pkg/clock" + clockmocks "github.com/shellhub-io/shellhub/pkg/clock/mocks" "github.com/shellhub-io/shellhub/pkg/models" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "go.mongodb.org/mongo-driver/bson" ) func TestNamespaceList(t *testing.T) { @@ -25,79 +26,100 @@ func TestNamespaceList(t *testing.T) { cases := []struct { description string - page paginator.Query - filters []models.Filter - export bool + opts []store.QueryOption fixtures []string expected Expected }{ { description: "succeeds when namespaces list is not empty", - page: paginator.Query{Page: -1, PerPage: -1}, - filters: []models.Filter{}, - export: false, - fixtures: []string{fixtures.FixtureNamespaces}, + opts: []store.QueryOption{ + s.Options().Match(&query.Filters{}), + s.Options().Paginate(&query.Paginator{Page: -1, PerPage: -1}), + }, + fixtures: []string{fixtureNamespaces}, expected: Expected{ ns: []models.Namespace{ { - CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - Name: "namespace-1", - Owner: "507f1f77bcf86cd799439011", - TenantID: "00000000-0000-4000-0000-000000000000", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "namespace-1", + Owner: 
"507f1f77bcf86cd799439011", + TenantID: "00000000-0000-4000-0000-000000000000", + DevicesAcceptedCount: 15, + DevicesPendingCount: 3, + DevicesRejectedCount: 2, + DevicesRemovedCount: 1, Members: []models.Member{ { - ID: "507f1f77bcf86cd799439011", - Role: guard.RoleOwner, + ID: "507f1f77bcf86cd799439011", + AddedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Role: authorizer.RoleOwner, }, { - ID: "6509e169ae6144b2f56bf288", - Role: guard.RoleObserver, + ID: "6509e169ae6144b2f56bf288", + AddedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Role: authorizer.RoleObserver, }, }, MaxDevices: -1, Settings: &models.NamespaceSettings{SessionRecord: true}, }, { - CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - Name: "namespace-2", - Owner: "6509e169ae6144b2f56bf288", - TenantID: "00000000-0000-4001-0000-000000000000", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "namespace-2", + Owner: "6509e169ae6144b2f56bf288", + TenantID: "00000000-0000-4001-0000-000000000000", + DevicesAcceptedCount: 8, + DevicesPendingCount: 1, + DevicesRejectedCount: 0, + DevicesRemovedCount: 2, Members: []models.Member{ { - ID: "6509e169ae6144b2f56bf288", - Role: guard.RoleOwner, + ID: "6509e169ae6144b2f56bf288", + AddedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Role: authorizer.RoleOwner, }, { - ID: "907f1f77bcf86cd799439022", - Role: guard.RoleOperator, + ID: "907f1f77bcf86cd799439022", + AddedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Role: authorizer.RoleOperator, }, }, MaxDevices: 10, Settings: &models.NamespaceSettings{SessionRecord: false}, }, { - CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - Name: "namespace-3", - Owner: "657b0e3bff780d625f74e49a", - TenantID: "00000000-0000-4002-0000-000000000000", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "namespace-3", + Owner: "657b0e3bff780d625f74e49a", + TenantID: "00000000-0000-4002-0000-000000000000", + DevicesAcceptedCount: 342, + DevicesPendingCount: 
0, + DevicesRejectedCount: 2, + DevicesRemovedCount: 4, Members: []models.Member{ { - ID: "657b0e3bff780d625f74e49a", - Role: guard.RoleOwner, + ID: "657b0e3bff780d625f74e49a", + AddedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Role: authorizer.RoleOwner, }, }, MaxDevices: 3, Settings: &models.NamespaceSettings{SessionRecord: true}, }, { - CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - Name: "namespace-4", - Owner: "6577267d8752d05270a4c07d", - TenantID: "00000000-0000-4003-0000-000000000000", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "namespace-4", + Owner: "6577267d8752d05270a4c07d", + TenantID: "00000000-0000-4003-0000-000000000000", + DevicesAcceptedCount: 25, + DevicesPendingCount: 5, + DevicesRejectedCount: 3, + DevicesRemovedCount: 0, Members: []models.Member{ { - ID: "6577267d8752d05270a4c07d", - Role: guard.RoleOwner, + ID: "6577267d8752d05270a4c07d", + AddedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Role: authorizer.RoleOwner, }, }, MaxDevices: -1, @@ -110,12 +132,6 @@ func TestNamespaceList(t *testing.T) { }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - // Due to the non-deterministic order of applying fixtures when dealing with multiple datasets, // we ensure that both the expected and result arrays are correctly sorted. sort := func(ns []models.Namespace) { @@ -126,10 +142,14 @@ func TestNamespaceList(t *testing.T) { for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + ctx := context.Background() + + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) - ns, count, err := mongostore.NamespaceList(context.TODO(), tc.page, tc.filters, tc.export) + ns, count, err := s.NamespaceList(ctx, tc.opts...) 
sort(tc.expected.ns) sort(ns) assert.Equal(t, tc.expected, Expected{ns: ns, count: count, err: err}) @@ -137,112 +157,101 @@ func TestNamespaceList(t *testing.T) { } } -func TestNamespaceGet(t *testing.T) { +func TestNamespaceResolve(t *testing.T) { type Expected struct { - ns *models.Namespace - err error + namespace *models.Namespace + err error } cases := []struct { description string - tenant string + resolver store.NamespaceResolver + value string fixtures []string expected Expected }{ { - description: "fails when tenant is not found", - tenant: "nonexistent", - fixtures: []string{fixtures.FixtureNamespaces, fixtures.FixtureDevices}, + description: "fails when namespace not found by tenant ID", + resolver: store.NamespaceTenantIDResolver, + value: "nonexistent-tenant-id", + fixtures: []string{fixtureNamespaces, fixtureUsers}, expected: Expected{ - ns: nil, - err: store.ErrNoDocuments, + namespace: nil, + err: store.ErrNoDocuments, }, }, { - description: "succeeds when tenant is found", - tenant: "00000000-0000-4000-0000-000000000000", - fixtures: []string{fixtures.FixtureNamespaces, fixtures.FixtureDevices}, + description: "succeeds resolving namespace by tenant ID", + resolver: store.NamespaceTenantIDResolver, + value: "00000000-0000-4000-0000-000000000000", + fixtures: []string{fixtureNamespaces, fixtureUsers}, expected: Expected{ - ns: &models.Namespace{ - CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - Name: "namespace-1", - Owner: "507f1f77bcf86cd799439011", - TenantID: "00000000-0000-4000-0000-000000000000", + namespace: &models.Namespace{ + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "namespace-1", + Owner: "507f1f77bcf86cd799439011", + TenantID: "00000000-0000-4000-0000-000000000000", + DevicesAcceptedCount: 15, + DevicesPendingCount: 3, + DevicesRejectedCount: 2, + DevicesRemovedCount: 1, Members: []models.Member{ { - ID: "507f1f77bcf86cd799439011", - Role: guard.RoleOwner, + ID: "507f1f77bcf86cd799439011", + AddedAt: 
time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Role: authorizer.RoleOwner, + Email: "john.doe@test.com", }, { - ID: "6509e169ae6144b2f56bf288", - Role: guard.RoleObserver, + ID: "6509e169ae6144b2f56bf288", + AddedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Role: authorizer.RoleObserver, + Email: "maria.garcia@test.com", }, }, - MaxDevices: -1, - Settings: &models.NamespaceSettings{SessionRecord: true}, - DevicesCount: 3, + MaxDevices: -1, + Settings: &models.NamespaceSettings{SessionRecord: true}, }, err: nil, }, }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - ns, err := mongostore.NamespaceGet(context.TODO(), tc.tenant) - assert.Equal(t, tc.expected, Expected{ns: ns, err: err}) - }) - } -} - -func TestNamespaceGetByName(t *testing.T) { - type Expected struct { - ns *models.Namespace - err error - } - - cases := []struct { - description string - name string - fixtures []string - expected Expected - }{ { - description: "fails when namespace is not found", - name: "nonexistent", - fixtures: []string{fixtures.FixtureNamespaces}, + description: "fails when namespace not found by name", + resolver: store.NamespaceNameResolver, + value: "nonexistent-namespace", + fixtures: []string{fixtureNamespaces, fixtureUsers}, expected: Expected{ - ns: nil, - err: store.ErrNoDocuments, + namespace: nil, + err: store.ErrNoDocuments, }, }, { - description: "succeeds when namespace is found", - name: "namespace-1", - fixtures: []string{fixtures.FixtureNamespaces}, + description: "succeeds resolving namespace by name", + resolver: store.NamespaceNameResolver, + value: "namespace-1", + fixtures: []string{fixtureNamespaces, fixtureUsers}, expected: Expected{ - ns: &models.Namespace{ - 
CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - Name: "namespace-1", - Owner: "507f1f77bcf86cd799439011", - TenantID: "00000000-0000-4000-0000-000000000000", + namespace: &models.Namespace{ + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "namespace-1", + Owner: "507f1f77bcf86cd799439011", + TenantID: "00000000-0000-4000-0000-000000000000", + DevicesAcceptedCount: 15, + DevicesPendingCount: 3, + DevicesRejectedCount: 2, + DevicesRemovedCount: 1, Members: []models.Member{ { - ID: "507f1f77bcf86cd799439011", - Role: guard.RoleOwner, + ID: "507f1f77bcf86cd799439011", + AddedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Role: authorizer.RoleOwner, + Email: "john.doe@test.com", }, { - ID: "6509e169ae6144b2f56bf288", - Role: guard.RoleObserver, + ID: "6509e169ae6144b2f56bf288", + AddedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Role: authorizer.RoleObserver, + Email: "maria.garcia@test.com", }, }, MaxDevices: -1, @@ -253,24 +262,22 @@ func TestNamespaceGetByName(t *testing.T) { }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + ctx := context.Background() - ns, err := mongostore.NamespaceGetByName(context.TODO(), tc.name) - assert.Equal(t, tc.expected, Expected{ns: ns, err: err}) + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + namespace, err := s.NamespaceResolve(ctx, tc.resolver, tc.value) + assert.Equal(t, tc.expected, Expected{namespace: namespace, err: err}) }) } } -func TestNamespaceGetFirst(t *testing.T) { +func TestNamespaceGetPreferred(t *testing.T) { type Expected struct { ns *models.Namespace err error @@ -278,37 +285,43 @@ func TestNamespaceGetFirst(t *testing.T) { cases := 
[]struct { description string - member string + memberID string fixtures []string expected Expected }{ { description: "fails when member is not found", - member: "000000000000000000000000", - fixtures: []string{fixtures.FixtureNamespaces}, + memberID: "000000000000000000000000", + fixtures: []string{fixtureNamespaces}, expected: Expected{ ns: nil, err: store.ErrNoDocuments, }, }, { - description: "succeeds when member is found", - member: "507f1f77bcf86cd799439011", - fixtures: []string{fixtures.FixtureNamespaces}, + description: "succeeds when member is found and tenantID is empty", + memberID: "507f1f77bcf86cd799439011", + fixtures: []string{fixtureNamespaces}, expected: Expected{ ns: &models.Namespace{ - CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - Name: "namespace-1", - Owner: "507f1f77bcf86cd799439011", - TenantID: "00000000-0000-4000-0000-000000000000", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "namespace-1", + Owner: "507f1f77bcf86cd799439011", + TenantID: "00000000-0000-4000-0000-000000000000", + DevicesAcceptedCount: 15, + DevicesPendingCount: 3, + DevicesRejectedCount: 2, + DevicesRemovedCount: 1, Members: []models.Member{ { - ID: "507f1f77bcf86cd799439011", - Role: guard.RoleOwner, + ID: "507f1f77bcf86cd799439011", + AddedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Role: authorizer.RoleOwner, }, { - ID: "6509e169ae6144b2f56bf288", - Role: guard.RoleObserver, + ID: "6509e169ae6144b2f56bf288", + AddedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Role: authorizer.RoleObserver, }, }, MaxDevices: -1, @@ -319,27 +332,31 @@ func TestNamespaceGetFirst(t *testing.T) { }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + ctx := 
context.Background() - ns, err := mongostore.NamespaceGetFirst(context.TODO(), tc.member) + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + ns, err := s.NamespaceGetPreferred(ctx, tc.memberID) assert.Equal(t, tc.expected, Expected{ns: ns, err: err}) }) } } func TestNamespaceCreate(t *testing.T) { + now := time.Now() + + clockMock := new(clockmocks.Clock) + clockMock.On("Now").Return(now) + clock.DefaultBackend = clockMock + type Expected struct { - ns *models.Namespace - err error + tenantID string + err error } cases := []struct { @@ -357,7 +374,7 @@ func TestNamespaceCreate(t *testing.T) { Members: []models.Member{ { ID: "507f1f77bcf86cd799439011", - Role: guard.RoleOwner, + Role: authorizer.RoleOwner, }, }, MaxDevices: -1, @@ -365,492 +382,295 @@ func TestNamespaceCreate(t *testing.T) { }, fixtures: []string{}, expected: Expected{ - ns: &models.Namespace{ - Name: "namespace-1", - Owner: "507f1f77bcf86cd799439011", - TenantID: "00000000-0000-4000-0000-000000000000", - Members: []models.Member{ - { - ID: "507f1f77bcf86cd799439011", - Role: guard.RoleOwner, - }, - }, - MaxDevices: -1, - Settings: &models.NamespaceSettings{SessionRecord: true}, - }, - err: nil, + tenantID: "00000000-0000-4000-0000-000000000000", + err: nil, }, }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + ctx := context.Background() - ns, err := mongostore.NamespaceCreate(context.TODO(), tc.ns) - assert.Equal(t, tc.expected, Expected{ns: ns, err: err}) + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + tenantID, err := s.NamespaceCreate(ctx, tc.ns) + assert.Equal(t, tc.expected, Expected{tenantID: 
tenantID, err: err}) }) } } -func TestNamespaceRename(t *testing.T) { +func TestNamespaceConflicts(t *testing.T) { type Expected struct { - ns *models.Namespace - err error + conflicts []string + ok bool + err error } cases := []struct { description string - tenant string - name string + target *models.NamespaceConflicts fixtures []string expected Expected }{ { - description: "fails when tenant is not found", - tenant: "nonexistent", - name: "edited-namespace", - fixtures: []string{fixtures.FixtureNamespaces}, - expected: Expected{ - ns: nil, - err: store.ErrNoDocuments, - }, + description: "no conflicts when target is empty", + target: &models.NamespaceConflicts{}, + fixtures: []string{fixtureNamespaces}, + expected: Expected{[]string{}, false, nil}, }, { - description: "succeeds when tenant is found", - tenant: "00000000-0000-4000-0000-000000000000", - name: "edited-namespace", - fixtures: []string{fixtures.FixtureNamespaces}, - expected: Expected{ - ns: &models.Namespace{ - CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - Name: "edited-namespace", - Owner: "507f1f77bcf86cd799439011", - TenantID: "00000000-0000-4000-0000-000000000000", - Members: []models.Member{ - { - ID: "507f1f77bcf86cd799439011", - Role: guard.RoleOwner, - }, - { - ID: "6509e169ae6144b2f56bf288", - Role: guard.RoleObserver, - }, - }, - MaxDevices: -1, - Settings: &models.NamespaceSettings{SessionRecord: true}, - DevicesCount: 0, - }, - err: nil, - }, + description: "no conflicts with non existing name", + target: &models.NamespaceConflicts{Name: "nonexistent-namespace"}, + fixtures: []string{fixtureNamespaces}, + expected: Expected{[]string{}, false, nil}, + }, + { + description: "conflict detected with existing name", + target: &models.NamespaceConflicts{Name: "namespace-1"}, + fixtures: []string{fixtureNamespaces}, + expected: Expected{[]string{"name"}, true, nil}, }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), 
cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + ctx := context.Background() - ns, err := mongostore.NamespaceRename(context.TODO(), tc.tenant, tc.name) - assert.Equal(t, tc.expected, Expected{ns: ns, err: err}) + require.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { require.NoError(t, srv.Reset()) }) + + conflicts, ok, err := s.NamespaceConflicts(ctx, tc.target) + require.Equal(t, tc.expected, Expected{conflicts, ok, err}) }) } } -func TestNamespaceUpdate(t *testing.T) { +func TestStore_NamespaceUpdate(t *testing.T) { cases := []struct { description string - tenant string - ns *models.Namespace + namespace *models.Namespace fixtures []string expected error }{ { - description: "fails when tenant is not found", - tenant: "nonexistent", - ns: &models.Namespace{ - Name: "edited-namespace", - MaxDevices: 3, - Settings: &models.NamespaceSettings{SessionRecord: true}, + description: "fails when namespace is not found", + namespace: &models.Namespace{ + TenantID: "nonexistent", + Name: "edited-namespace", }, - fixtures: []string{fixtures.FixtureNamespaces}, + fixtures: []string{fixtureNamespaces}, expected: store.ErrNoDocuments, }, { - description: "succeeds when tenant is found", - tenant: "00000000-0000-4000-0000-000000000000", - ns: &models.Namespace{ - Name: "edited-namespace", - MaxDevices: 3, - Settings: &models.NamespaceSettings{SessionRecord: true}, + description: "succeeds when namespace is found", + namespace: &models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "edited-namespace", }, - fixtures: []string{fixtures.FixtureNamespaces}, + fixtures: []string{fixtureNamespaces}, expected: nil, }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") 
- for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + ctx := context.Background() + + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) - err := mongostore.NamespaceUpdate(context.TODO(), tc.tenant, tc.ns) + err := s.NamespaceUpdate(ctx, tc.namespace) assert.Equal(t, tc.expected, err) }) } } -func TestNamespaceDelete(t *testing.T) { +func TestStore_NamespaceDelete(t *testing.T) { cases := []struct { description string - tenant string + namespace *models.Namespace fixtures []string expected error }{ { description: "fails when namespace is not found", - tenant: "nonexistent", - fixtures: []string{fixtures.FixtureNamespaces}, - expected: store.ErrNoDocuments, + namespace: &models.Namespace{ + TenantID: "nonexistent", + }, + fixtures: []string{fixtureNamespaces}, + expected: store.ErrNoDocuments, }, { description: "succeeds when namespace is found", - tenant: "00000000-0000-4000-0000-000000000000", - fixtures: []string{fixtures.FixtureNamespaces}, - expected: nil, + namespace: &models.Namespace{ + TenantID: "00000000-0000-4000-0000-000000000000", + }, + fixtures: []string{fixtureNamespaces}, + expected: nil, }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + ctx := context.Background() - err := mongostore.NamespaceDelete(context.TODO(), tc.tenant) + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + err := s.NamespaceDelete(ctx, tc.namespace) assert.Equal(t, tc.expected, err) }) } } -func TestNamespaceAddMember(t *testing.T) { - type Expected struct { - ns 
*models.Namespace - err error - } - +func TestStore_NamespaceDeleteMany(t *testing.T) { cases := []struct { - description string - tenant string - member string - role string - fixtures []string - expected Expected + description string + tenantIDs []string + fixtures []string + expectedCount int64 + expectedError error }{ { - description: "fails when tenant is not found", - tenant: "nonexistent", - member: "6509de884238881ac1b2b289", - role: guard.RoleObserver, - fixtures: []string{fixtures.FixtureNamespaces}, - expected: Expected{ - ns: nil, - err: store.ErrNoDocuments, - }, + description: "fails when no namespaces are found", + tenantIDs: []string{"nonexistent1", "nonexistent2"}, + fixtures: []string{fixtureNamespaces}, + expectedCount: 0, + expectedError: nil, }, { - description: "fails when member has already been added", - tenant: "00000000-0000-4000-0000-000000000000", - member: "6509e169ae6144b2f56bf288", - role: guard.RoleObserver, - fixtures: []string{fixtures.FixtureNamespaces}, - expected: Expected{ - ns: nil, - err: ErrNamespaceDuplicatedMember, - }, + description: "succeeds deleting single namespace", + tenantIDs: []string{"00000000-0000-4000-0000-000000000000"}, + fixtures: []string{fixtureNamespaces}, + expectedCount: 1, + expectedError: nil, }, { - description: "succeeds when tenant is found", - tenant: "00000000-0000-4000-0000-000000000000", - member: "6509de884238881ac1b2b289", - role: guard.RoleObserver, - fixtures: []string{fixtures.FixtureNamespaces}, - expected: Expected{ - ns: &models.Namespace{ - CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - Name: "namespace-1", - Owner: "507f1f77bcf86cd799439011", - TenantID: "00000000-0000-4000-0000-000000000000", - Members: []models.Member{ - { - ID: "507f1f77bcf86cd799439011", - Role: guard.RoleOwner, - }, - { - ID: "6509e169ae6144b2f56bf288", - Role: guard.RoleObserver, - }, - { - ID: "6509de884238881ac1b2b289", - Role: guard.RoleObserver, - }, - }, - MaxDevices: -1, - Settings: 
&models.NamespaceSettings{SessionRecord: true}, - DevicesCount: 0, - }, - err: nil, - }, + description: "succeeds deleting multiple namespaces", + tenantIDs: []string{"00000000-0000-4000-0000-000000000000", "00000000-0000-4001-0000-000000000000", "00000000-0000-4002-0000-000000000000"}, + fixtures: []string{fixtureNamespaces}, + expectedCount: 3, + expectedError: nil, }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - ns, err := mongostore.NamespaceAddMember(context.TODO(), tc.tenant, tc.member, tc.role) - assert.Equal(t, tc.expected, Expected{ns: ns, err: err}) - }) - } -} - -func TestNamespaceEditMember(t *testing.T) { - cases := []struct { - description string - tenant string - member string - role string - fixtures []string - expected error - }{ { - description: "fails when user is not found", - tenant: "nonexistent", - member: "000000000000000000000000", - role: guard.RoleObserver, - fixtures: []string{fixtures.FixtureNamespaces}, - expected: ErrUserNotFound, + description: "succeeds with mix of valid and invalid tenant IDs", + tenantIDs: []string{"00000000-0000-4000-0000-000000000000", "nonexistent"}, + fixtures: []string{fixtureNamespaces}, + expectedCount: 1, + expectedError: nil, }, { - description: "succeeds when tenant and user is found", - tenant: "00000000-0000-4000-0000-000000000000", - member: "6509e169ae6144b2f56bf288", - role: guard.RoleOperator, - fixtures: []string{fixtures.FixtureNamespaces}, - expected: nil, + description: "handles empty tenant IDs list", + tenantIDs: []string{}, + fixtures: []string{fixtureNamespaces}, + expectedCount: 0, + expectedError: nil, }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := 
NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + ctx := context.Background() - err := mongostore.NamespaceEditMember(context.TODO(), tc.tenant, tc.member, tc.role) - assert.Equal(t, tc.expected, err) + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + deletedCount, err := s.NamespaceDeleteMany(ctx, tc.tenantIDs) + + if tc.expectedError != nil { + assert.Equal(t, tc.expectedError, err) + } else { + assert.NoError(t, err) + } + + assert.Equal(t, tc.expectedCount, deletedCount) }) } } -func TestNamespaceRemoveMember(t *testing.T) { +func TestNamespaceIncrementDeviceCount(t *testing.T) { type Expected struct { - ns *models.Namespace - err error + acceptedCount int64 + pendingCount int64 + rejectedCount int64 + err error } cases := []struct { description string tenant string - member string + status models.DeviceStatus + count int64 fixtures []string expected Expected }{ { description: "fails when tenant is not found", tenant: "nonexistent", - member: "6509de884238881ac1b2b289", - fixtures: []string{fixtures.FixtureNamespaces}, + status: models.DeviceStatusAccepted, + count: 5, + fixtures: []string{fixtureNamespaces}, expected: Expected{ - ns: nil, err: store.ErrNoDocuments, }, }, { - description: "fails when member is not found", + description: "succeeds when incrementing devices count", tenant: "00000000-0000-4000-0000-000000000000", - member: "nonexistent", - fixtures: []string{fixtures.FixtureNamespaces}, + status: models.DeviceStatusAccepted, + count: 5, + fixtures: []string{fixtureNamespaces}, expected: Expected{ - ns: nil, - err: ErrUserNotFound, + acceptedCount: 20, // 15 + 5 + pendingCount: 3, + rejectedCount: 2, + err: nil, }, }, { - description: "succeeds when tenant and user is found", 
+ description: "succeeds when decrementing devices count", tenant: "00000000-0000-4000-0000-000000000000", - member: "6509e169ae6144b2f56bf288", - fixtures: []string{fixtures.FixtureNamespaces}, + status: models.DeviceStatusPending, + count: -2, + fixtures: []string{fixtureNamespaces}, expected: Expected{ - ns: &models.Namespace{ - CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - Name: "namespace-1", - Owner: "507f1f77bcf86cd799439011", - TenantID: "00000000-0000-4000-0000-000000000000", - Members: []models.Member{ - { - ID: "507f1f77bcf86cd799439011", - Role: guard.RoleOwner, - }, - }, - MaxDevices: -1, - Settings: &models.NamespaceSettings{SessionRecord: true}, - DevicesCount: 0, - }, - err: nil, + acceptedCount: 15, + pendingCount: 1, // 3 - 2 + rejectedCount: 2, + err: nil, }, }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - ns, err := mongostore.NamespaceRemoveMember(context.TODO(), tc.tenant, tc.member) - assert.Equal(t, tc.expected, Expected{ns: ns, err: err}) - }) - } -} - -func TestNamespaceSetSessionRecord(t *testing.T) { - cases := []struct { - description string - tenant string - sessionRec bool - fixtures []string - expected error - }{ - { - description: "fails when tenant is not found", - tenant: "nonexistent", - sessionRec: true, - fixtures: []string{fixtures.FixtureNamespaces}, - expected: store.ErrNoDocuments, - }, - { - description: "succeeds when tenant is found", - tenant: "00000000-0000-4000-0000-000000000000", - sessionRec: true, - fixtures: []string{fixtures.FixtureNamespaces}, - expected: nil, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, 
"test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := mongostore.NamespaceSetSessionRecord(context.TODO(), tc.sessionRec, tc.tenant) - assert.Equal(t, tc.expected, err) - }) - } -} + ctx := context.Background() -func TestNamespaceGetSessionRecord(t *testing.T) { - type Expected struct { - set bool - err error - } + require.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { require.NoError(t, srv.Reset()) }) - cases := []struct { - description string - tenant string - fixtures []string - expected Expected - }{ - { - description: "fails when tenant is not found", - tenant: "nonexistent", - fixtures: []string{fixtures.FixtureNamespaces}, - expected: Expected{ - set: false, - err: store.ErrNoDocuments, - }, - }, - { - description: "succeeds when tenant is found", - tenant: "00000000-0000-4000-0000-000000000000", - fixtures: []string{fixtures.FixtureNamespaces}, - expected: Expected{ - set: true, - err: nil, - }, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() + err := s.NamespaceIncrementDeviceCount(ctx, tc.tenant, tc.status, tc.count) + require.Equal(t, tc.expected.err, err) + if err != nil { + return + } - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + namespace := new(models.Namespace) + require.NoError(t, db.Collection("namespaces").FindOne(ctx, bson.M{"tenant_id": tc.tenant}).Decode(namespace)) - set, err := mongostore.NamespaceGetSessionRecord(context.TODO(), tc.tenant) - assert.Equal(t, tc.expected, Expected{set: set, err: err}) + require.Equal(t, tc.expected.acceptedCount, namespace.DevicesAcceptedCount) + require.Equal(t, tc.expected.pendingCount, 
namespace.DevicesPendingCount) + require.Equal(t, tc.expected.rejectedCount, namespace.DevicesRejectedCount) }) } } diff --git a/api/store/mongo/options/options.go b/api/store/mongo/options/options.go new file mode 100644 index 00000000000..1e41185ac39 --- /dev/null +++ b/api/store/mongo/options/options.go @@ -0,0 +1,106 @@ +package options + +import ( + "context" + + "github.com/pkg/errors" + "github.com/shellhub-io/shellhub/api/store/mongo/migrations" + "github.com/sirupsen/logrus" + lock "github.com/square/mongo-lock" + migrate "github.com/xakep666/mongo-migrate" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" + "go.mongodb.org/mongo-driver/mongo/writeconcern" +) + +type DatabaseOpt func(ctx context.Context, db *mongo.Database) error + +func RunMigatrions(ctx context.Context, db *mongo.Database) error { + logrus.Info("Creating lock for the resource migrations") + + lockClient := lock.NewClient(db.Collection("locks", options.Collection().SetWriteConcern(writeconcern.Majority()))) + if err := lockClient.CreateIndexes(context.TODO()); err != nil { + logrus.WithError(err).Fatal("Failed to create a lock for the database") + } + + logrus.Info("Locking the resource migrations") + + lockID := "0" + + if err := lockClient.XLock(ctx, "migrations", lockID, lock.LockDetails{}); err != nil { + logrus.WithError(err).Fatal("Failed to lock the migrations") + } + + defer func() { + logrus.Info("Unlocking the resource migrations") + + if _, err := lockClient.Unlock(ctx, lockID); err != nil { + logrus.WithError(err).Fatal("Failed to unlock the migrations") + } + }() + + if err := fixMigrations072(db); err != nil { + logrus.WithError(err).Fatal("Failed to fix the migrations lock bug") + } + + list := migrations.GenerateMigrations() + migration := migrate.NewMigrate(db, list...) 
+
+	current, _, err := migration.Version(ctx)
+	if err != nil {
+		logrus.WithError(err).Fatal("Failed to get current migration version")
+	}
+
+	latest := list[len(list)-1]
+
+	if current == latest.Version {
+		logrus.Info("No migrations to apply")
+
+		return nil
+	}
+
+	logrus.WithFields(logrus.Fields{
+		"from": current,
+		"to":   latest.Version,
+	}).Info("Migrating database")
+
+	return migration.Up(ctx, migrate.AllAvailable)
+}
+
+// This function is necessary due to the lock bug on v0.7.2.
+func fixMigrations072(db *mongo.Database) error {
+	// Search for lock in migrations collection.
+	if _, err := db.Collection("migrations").Find(context.TODO(),
+		bson.M{"resource": "migrations"},
+	); err != nil && err == mongo.ErrNoDocuments {
+		// No documents found, nothing to do.
+		return nil
+	} else if err != nil {
+		return errors.Wrap(err, "Failed to find a lock for the migrations")
+	}
+
+	// Creates a temporary collection containing unique migration documents.
+	if _, err := db.Collection("migrations").Aggregate(context.TODO(), []bson.M{
+		{"$match": bson.M{"version": bson.M{"$ne": nil}}},
+		{"$sort": bson.M{"_id": 1}},
+		{"$group": bson.M{"_id": "$version", "doc": bson.M{"$first": "$$ROOT"}}},
+		{"$replaceRoot": bson.M{"newRoot": "$doc"}},
+		{"$out": "migrations_tmp"},
+	}); err != nil {
+		return errors.Wrap(err, "Failed to create a temporary collection")
+	}
+
+	// Cleanup migrations collection.
+	if _, err := db.Collection("migrations").DeleteMany(context.TODO(), bson.M{}); err != nil {
+		return errors.Wrap(err, "Failed to cleanup the migrations collection")
+	}
+
+	// Copy documents from temporary collection to migrations collection.
+	if _, err := db.Collection("migrations_tmp").Aggregate(context.TODO(), []bson.M{{"$out": "migrations"}}); err != nil {
+		return errors.Wrap(err, "Failed to copy the documents to a new migration collection")
+	}
+
+	// Drop temporary collection.
+ return db.Collection("migrations_tmp").Drop(context.TODO()) +} diff --git a/api/store/mongo/privatekey_test.go b/api/store/mongo/privatekey_test.go index e0f5cf0fb84..2a004af93cd 100644 --- a/api/store/mongo/privatekey_test.go +++ b/api/store/mongo/privatekey_test.go @@ -1,14 +1,11 @@ -package mongo +package mongo_test import ( "context" "testing" "time" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" - "github.com/shellhub-io/shellhub/api/pkg/fixtures" "github.com/shellhub-io/shellhub/api/store" - "github.com/shellhub-io/shellhub/pkg/cache" "github.com/shellhub-io/shellhub/pkg/models" "github.com/stretchr/testify/assert" ) @@ -32,18 +29,16 @@ func TestPrivateKeyCreate(t *testing.T) { }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + ctx := context.Background() + + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) - err := mongostore.PrivateKeyCreate(context.TODO(), tc.priKey) + err := s.PrivateKeyCreate(ctx, tc.priKey) assert.Equal(t, tc.expected, err) }) } @@ -64,7 +59,7 @@ func TestPrivateKeyGet(t *testing.T) { { description: "fails when private key is not found", fingerprint: "nonexistent", - fixtures: []string{fixtures.FixturePrivateKeys}, + fixtures: []string{fixturePrivateKeys}, expected: Expected{ privKey: nil, err: store.ErrNoDocuments, @@ -73,7 +68,7 @@ func TestPrivateKeyGet(t *testing.T) { { description: "succeeds when private key is found", fingerprint: "fingerprint", - fixtures: []string{fixtures.FixturePrivateKeys}, + fixtures: []string{fixturePrivateKeys}, expected: Expected{ privKey: &models.PrivateKey{ Data: []byte("test"), @@ -85,18 +80,16 @@ func TestPrivateKeyGet(t *testing.T) { }, } - db := 
dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + ctx := context.Background() + + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) - privKey, err := mongostore.PrivateKeyGet(context.TODO(), tc.fingerprint) + privKey, err := s.PrivateKeyGet(ctx, tc.fingerprint) assert.Equal(t, tc.expected, Expected{privKey: privKey, err: err}) }) } diff --git a/api/store/mongo/publickey.go b/api/store/mongo/publickey.go index 6e97902746f..652612f627a 100644 --- a/api/store/mongo/publickey.go +++ b/api/store/mongo/publickey.go @@ -3,51 +3,84 @@ package mongo import ( "context" - "github.com/shellhub-io/shellhub/api/pkg/gateway" "github.com/shellhub-io/shellhub/api/store" - "github.com/shellhub-io/shellhub/api/store/mongo/queries" - "github.com/shellhub-io/shellhub/pkg/api/paginator" "github.com/shellhub-io/shellhub/pkg/models" "go.mongodb.org/mongo-driver/bson" - "go.mongodb.org/mongo-driver/mongo/options" + "go.mongodb.org/mongo-driver/bson/primitive" ) -func (s *Store) PublicKeyGet(ctx context.Context, fingerprint string, tenantID string) (*models.PublicKey, error) { +func (s *Store) PublicKeyResolve(ctx context.Context, resolver store.PublicKeyResolver, value string, opts ...store.QueryOption) (*models.PublicKey, error) { + var fingerprint string + switch resolver { + case store.PublicKeyFingerprintResolver: + fingerprint = value + default: + return nil, store.ErrNoDocuments + } + + pipeline := []bson.M{ + { + "$match": bson.M{ + "fingerprint": fingerprint, + }, + }, + { + "$lookup": bson.M{ + "from": "tags", + "localField": "filter.tag_ids", + "foreignField": "_id", + "as": "filter.tags", + }, + }, + } + + for _, opt := range opts { + if err := 
opt(context.WithValue(ctx, "query", &pipeline)); err != nil { + return nil, err + } + } + + cursor, err := s.db.Collection("public_keys").Aggregate(ctx, pipeline) + if err != nil { + return nil, FromMongoError(err) + } + defer cursor.Close(ctx) + + if !cursor.Next(ctx) { + return nil, store.ErrNoDocuments + } + pubKey := new(models.PublicKey) - if err := s.db.Collection("public_keys").FindOne(ctx, bson.M{"fingerprint": fingerprint, "tenant_id": tenantID}).Decode(&pubKey); err != nil { + if err := cursor.Decode(&pubKey); err != nil { return nil, FromMongoError(err) } return pubKey, nil } -func (s *Store) PublicKeyList(ctx context.Context, pagination paginator.Query) ([]models.PublicKey, int, error) { +func (s *Store) PublicKeyList(ctx context.Context, opts ...store.QueryOption) ([]models.PublicKey, int, error) { query := []bson.M{ { - "$sort": bson.M{ - "created_at": 1, + "$lookup": bson.M{ + "from": "tags", + "localField": "filter.tag_ids", + "foreignField": "_id", + "as": "filter.tags", }, }, } - // Only match for the respective tenant if requested - if tenant := gateway.TenantFromContext(ctx); tenant != nil { - query = append(query, bson.M{ - "$match": bson.M{ - "tenant_id": tenant.ID, - }, - }) + for _, opt := range opts { + if err := opt(context.WithValue(ctx, "query", &query)); err != nil { + return nil, 0, err + } } - queryCount := query - queryCount = append(queryCount, bson.M{"$count": "count"}) - count, err := AggregateCount(ctx, s.db.Collection("public_keys"), queryCount) + count, err := CountAllMatchingDocuments(ctx, s.db.Collection("public_keys"), query) if err != nil { return nil, 0, err } - query = append(query, queries.BuildPaginationQuery(pagination)...) 
- list := make([]models.PublicKey, 0) cursor, err := s.db.Collection("public_keys").Aggregate(ctx, query) if err != nil { @@ -68,31 +101,80 @@ func (s *Store) PublicKeyList(ctx context.Context, pagination paginator.Query) ( return list, count, err } -func (s *Store) PublicKeyCreate(ctx context.Context, key *models.PublicKey) error { - _, err := s.db.Collection("public_keys").InsertOne(ctx, key) +func (s *Store) PublicKeyCreate(ctx context.Context, key *models.PublicKey) (string, error) { + bsonBytes, err := bson.Marshal(key) + if err != nil { + return "", FromMongoError(err) + } + + doc := make(bson.M) + if err := bson.Unmarshal(bsonBytes, &doc); err != nil { + return "", FromMongoError(err) + } - return FromMongoError(err) + // WORKAROUND: Convert string TagIDs to MongoDB ObjectIDs for referential integrity + // with the tags collection where _id is ObjectID type + if len(key.Filter.TagIDs) > 0 { + tagIDs := doc["filter"].(bson.M)["tag_ids"].(bson.A) + for i, id := range tagIDs { + objID, _ := primitive.ObjectIDFromHex(id.(string)) + tagIDs[i] = objID + } + } + + if _, err := s.db.Collection("public_keys").InsertOne(ctx, doc); err != nil { + return "", FromMongoError(err) + } + + return doc["fingerprint"].(string), nil } -func (s *Store) PublicKeyUpdate(ctx context.Context, fingerprint string, tenantID string, key *models.PublicKeyUpdate) (*models.PublicKey, error) { - opts := options.FindOneAndUpdate().SetReturnDocument(options.After) - filter := bson.M{"fingerprint": fingerprint, "tenant_id": tenantID} +func (s *Store) PublicKeyUpdate(ctx context.Context, publicKey *models.PublicKey) error { + bsonBytes, err := bson.Marshal(publicKey) + if err != nil { + return FromMongoError(err) + } - pubKey := new(models.PublicKey) - if err := s.db.Collection("public_keys").FindOneAndUpdate(ctx, filter, bson.M{"$set": key}, opts).Decode(&pubKey); err != nil { - return nil, FromMongoError(err) + doc := make(bson.M) + if err := bson.Unmarshal(bsonBytes, &doc); err != nil { + 
return FromMongoError(err) } - return pubKey, nil + delete(doc, "_id") + // WORKAROUND: Convert string TagIDs to MongoDB ObjectIDs for referential integrity + // with the tags collection where _id is ObjectID type + delete(doc, "tags") + if filterDoc, ok := doc["filter"].(bson.M); ok { + if tagIDs, ok := filterDoc["tag_ids"].(bson.A); ok && len(tagIDs) > 0 { + for i, id := range tagIDs { + if idStr, ok := id.(string); ok { + objID, _ := primitive.ObjectIDFromHex(idStr) + tagIDs[i] = objID + } + } + } + } + + filter := bson.M{"fingerprint": publicKey.Fingerprint, "tenant_id": publicKey.TenantID} + r, err := s.db.Collection("public_keys").UpdateOne(ctx, filter, bson.M{"$set": doc}) + if err != nil { + return FromMongoError(err) + } + + if r.MatchedCount == 0 { + return store.ErrNoDocuments + } + + return nil } -func (s *Store) PublicKeyDelete(ctx context.Context, fingerprint string, tenantID string) error { - pubKey, err := s.db.Collection("public_keys").DeleteOne(ctx, bson.M{"fingerprint": fingerprint, "tenant_id": tenantID}) +func (s *Store) PublicKeyDelete(ctx context.Context, publicKey *models.PublicKey) error { + r, err := s.db.Collection("public_keys").DeleteOne(ctx, bson.M{"fingerprint": publicKey.Fingerprint, "tenant_id": publicKey.TenantID}) if err != nil { return FromMongoError(err) } - if pubKey.DeletedCount < 1 { + if r.DeletedCount < 1 { return store.ErrNoDocuments } diff --git a/api/store/mongo/publickey_tags.go b/api/store/mongo/publickey_tags.go deleted file mode 100644 index 8a11753d90b..00000000000 --- a/api/store/mongo/publickey_tags.go +++ /dev/null @@ -1,63 +0,0 @@ -package mongo - -import ( - "context" - - "github.com/shellhub-io/shellhub/api/store" - "go.mongodb.org/mongo-driver/bson" -) - -func (s *Store) PublicKeyPushTag(ctx context.Context, tenant, fingerprint, tag string) error { - result, err := s.db.Collection("public_keys").UpdateOne(ctx, bson.M{"tenant_id": tenant, "fingerprint": fingerprint}, bson.M{"$addToSet": bson.M{"filter.tags": 
tag}}) - if err != nil { - return err - } - - if result.ModifiedCount < 1 { - return store.ErrNoDocuments - } - - return nil -} - -func (s *Store) PublicKeyPullTag(ctx context.Context, tenant, fingerprint, tag string) error { - result, err := s.db.Collection("public_keys").UpdateOne(ctx, bson.M{"tenant_id": tenant, "fingerprint": fingerprint}, bson.M{"$pull": bson.M{"filter.tags": tag}}) - if err != nil { - return err - } - - if result.ModifiedCount < 1 { - return store.ErrNoDocuments - } - - return nil -} - -func (s *Store) PublicKeySetTags(ctx context.Context, tenant, fingerprint string, tags []string) (int64, int64, error) { - res, err := s.db.Collection("public_keys").UpdateOne(ctx, bson.M{"tenant_id": tenant, "fingerprint": fingerprint}, bson.M{"$set": bson.M{"filter.tags": tags}}) - - return res.MatchedCount, res.ModifiedCount, FromMongoError(err) -} - -func (s *Store) PublicKeyBulkRenameTag(ctx context.Context, tenant, currentTag, newTag string) (int64, error) { - res, err := s.db.Collection("public_keys").UpdateMany(ctx, bson.M{"tenant_id": tenant, "filter.tags": currentTag}, bson.M{"$set": bson.M{"filter.tags.$": newTag}}) - - return res.ModifiedCount, FromMongoError(err) -} - -func (s *Store) PublicKeyBulkDeleteTag(ctx context.Context, tenant, tag string) (int64, error) { - res, err := s.db.Collection("public_keys").UpdateMany(ctx, bson.M{"tenant_id": tenant}, bson.M{"$pull": bson.M{"filter.tags": tag}}) - - return res.ModifiedCount, FromMongoError(err) -} - -func (s *Store) PublicKeyGetTags(ctx context.Context, tenant string) ([]string, int, error) { - list, err := s.db.Collection("public_keys").Distinct(ctx, "filter.tags", bson.M{"tenant_id": tenant}) - - tags := make([]string, len(list)) - for i, item := range list { - tags[i] = item.(string) //nolint:forcetypeassert - } - - return tags, len(tags), FromMongoError(err) -} diff --git a/api/store/mongo/publickey_tags_test.go b/api/store/mongo/publickey_tags_test.go deleted file mode 100644 index 
722979fcec6..00000000000 --- a/api/store/mongo/publickey_tags_test.go +++ /dev/null @@ -1,378 +0,0 @@ -package mongo - -import ( - "context" - "testing" - - "github.com/shellhub-io/shellhub/api/pkg/dbtest" - "github.com/shellhub-io/shellhub/api/pkg/fixtures" - "github.com/shellhub-io/shellhub/api/store" - "github.com/shellhub-io/shellhub/pkg/cache" - "github.com/stretchr/testify/assert" -) - -func TestPublicKeyPushTag(t *testing.T) { - cases := []struct { - description string - fingerprint string - tenant string - tag string - fixtures []string - expected error - }{ - { - description: "fails when public key is not found due to fingerprint", - fingerprint: "nonexistent", - tenant: "00000000-0000-4000-0000-000000000000", - tag: "new-tag", - fixtures: []string{fixtures.FixturePublicKeys}, - expected: store.ErrNoDocuments, - }, - { - description: "fails when public key is not found due to tenant", - fingerprint: "fingerprint", - tenant: "nonexistent", - tag: "new-tag", - fixtures: []string{fixtures.FixturePublicKeys}, - expected: store.ErrNoDocuments, - }, - { - description: "succeeds when public key is found", - fingerprint: "fingerprint", - tenant: "00000000-0000-4000-0000-000000000000", - tag: "new-tag", - fixtures: []string{fixtures.FixturePublicKeys}, - expected: nil, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := mongostore.PublicKeyPushTag(context.TODO(), tc.tenant, tc.fingerprint, tc.tag) - assert.Equal(t, tc.expected, err) - }) - } -} - -func TestPublicKeyPullTag(t *testing.T) { - cases := []struct { - description string - fingerprint string - tenant string - tag string - fixtures []string - expected error - }{ - { - description: "fails when public key is not 
found due to fingerprint", - fingerprint: "nonexistent", - tenant: "00000000-0000-4000-0000-000000000000", - tag: "tag-1", - fixtures: []string{fixtures.FixturePublicKeys}, - expected: store.ErrNoDocuments, - }, - { - description: "fails when public key is not found due to tenant", - fingerprint: "fingerprint", - tenant: "nonexistent", - tag: "tag-1", - fixtures: []string{fixtures.FixturePublicKeys}, - expected: store.ErrNoDocuments, - }, - { - description: "fails when public key is not found due to tag", - fingerprint: "fingerprint", - tenant: "00000000-0000-4000-0000-000000000000", - tag: "nonexistent", - fixtures: []string{fixtures.FixturePublicKeys}, - expected: store.ErrNoDocuments, - }, - { - description: "succeeds when public key is found", - fingerprint: "fingerprint", - tenant: "00000000-0000-4000-0000-000000000000", - tag: "tag-1", - fixtures: []string{fixtures.FixturePublicKeys}, - expected: nil, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := mongostore.PublicKeyPullTag(context.TODO(), tc.tenant, tc.fingerprint, tc.tag) - assert.Equal(t, tc.expected, err) - }) - } -} - -func TestPublicKeySetTags(t *testing.T) { - type Expected struct { - matchedCount int64 - updatedCount int64 - err error - } - - cases := []struct { - description string - fingerprint string - tenant string - tags []string - fixtures []string - expected Expected - }{ - { - description: "fails when public key is not found due to fingerprint", - fingerprint: "nonexistent", - tenant: "00000000-0000-4000-0000-000000000000", - tags: []string{"tag-1"}, - fixtures: []string{fixtures.FixturePublicKeys}, - expected: Expected{ - matchedCount: 0, - updatedCount: 0, - err: nil, - }, - }, - { - 
description: "fails when public key is not found due to tenant", - fingerprint: "fingerprint", - tenant: "nonexistent", - tags: []string{"tag-1"}, - fixtures: []string{fixtures.FixturePublicKeys}, - expected: Expected{ - matchedCount: 0, - updatedCount: 0, - err: nil, - }, - }, - { - description: "succeeds when tags public key is found and tags are equal than current public key tags", - fingerprint: "fingerprint", - tenant: "00000000-0000-4000-0000-000000000000", - tags: []string{"tag-1"}, - fixtures: []string{fixtures.FixturePublicKeys}, - expected: Expected{ - matchedCount: 1, - updatedCount: 0, - err: nil, - }, - }, - { - description: "succeeds when tags public key is found", - fingerprint: "fingerprint", - tenant: "00000000-0000-4000-0000-000000000000", - tags: []string{"new-tag"}, - fixtures: []string{fixtures.FixturePublicKeys}, - expected: Expected{ - matchedCount: 1, - updatedCount: 1, - err: nil, - }, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - matchedCount, updatedCount, err := mongostore.PublicKeySetTags(context.TODO(), tc.tenant, tc.fingerprint, tc.tags) - assert.Equal(t, tc.expected, Expected{matchedCount, updatedCount, err}) - }) - } -} - -func TestPublicKeyBulkRenameTag(t *testing.T) { - type Expected struct { - count int64 - err error - } - - cases := []struct { - description string - fingerprint string - tenant string - oldTag string - newTag string - fixtures []string - expected Expected - }{ - { - description: "fails when public key is not found due to tenant", - fingerprint: "fingerprint", - tenant: "nonexistent", - oldTag: "tag-1", - newTag: "edited-tag", - fixtures: []string{fixtures.FixturePublicKeys}, - expected: Expected{ - count: 0, - err: 
nil, - }, - }, - { - description: "fails when public key is not found due to tag", - tenant: "00000000-0000-4000-0000-000000000000", - oldTag: "nonexistent", - newTag: "edited-tag", - fixtures: []string{fixtures.FixturePublicKeys}, - expected: Expected{ - count: 0, - err: nil, - }, - }, - { - description: "succeeds when public key is found", - tenant: "00000000-0000-4000-0000-000000000000", - oldTag: "tag-1", - newTag: "edited-tag", - fixtures: []string{fixtures.FixturePublicKeys}, - expected: Expected{ - count: 1, - err: nil, - }, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - count, err := mongostore.PublicKeyBulkRenameTag(context.TODO(), tc.tenant, tc.oldTag, tc.newTag) - assert.Equal(t, tc.expected, Expected{count, err}) - }) - } -} - -func TestPublicKeyBulkDeleteTag(t *testing.T) { - type Expected struct { - count int64 - err error - } - - cases := []struct { - description string - tenant string - tag string - fixtures []string - expected Expected - }{ - { - description: "fails when public key is not found due to tenant", - tenant: "nonexistent", - tag: "tag-1", - fixtures: []string{fixtures.FixturePublicKeys}, - expected: Expected{ - count: 0, - err: nil, - }, - }, - { - description: "fails when public key is not found due to tag", - tenant: "00000000-0000-4000-0000-000000000000", - tag: "nonexistent", - fixtures: []string{fixtures.FixturePublicKeys}, - expected: Expected{ - count: 0, - err: nil, - }, - }, - { - description: "succeeds when public key is found", - tenant: "00000000-0000-4000-0000-000000000000", - tag: "tag-1", - fixtures: []string{fixtures.FixturePublicKeys}, - expected: Expected{ - count: 1, - err: nil, - }, - }, - } - - db := dbtest.DBServer{} - 
defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - count, err := mongostore.PublicKeyBulkDeleteTag(context.TODO(), tc.tenant, tc.tag) - assert.Equal(t, tc.expected, Expected{count, err}) - }) - } -} - -func TestPublicKeyGetTags(t *testing.T) { - type Expected struct { - tags []string - len int - err error - } - - cases := []struct { - description string - tenant string - fixtures []string - expected Expected - }{ - { - description: "succeeds when tags list is greater than 1", - tenant: "00000000-0000-4000-0000-000000000000", - fixtures: []string{fixtures.FixturePublicKeys}, - expected: Expected{ - tags: []string{"tag-1"}, - len: 1, - err: nil, - }, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - tags, count, err := mongostore.PublicKeyGetTags(context.TODO(), tc.tenant) - assert.Equal(t, tc.expected, Expected{tags: tags, len: count, err: err}) - }) - } -} diff --git a/api/store/mongo/publickey_test.go b/api/store/mongo/publickey_test.go index 1d3fa8afb9a..f7ce9903139 100644 --- a/api/store/mongo/publickey_test.go +++ b/api/store/mongo/publickey_test.go @@ -1,20 +1,17 @@ -package mongo +package mongo_test import ( "context" "testing" "time" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" - "github.com/shellhub-io/shellhub/api/pkg/fixtures" "github.com/shellhub-io/shellhub/api/store" - "github.com/shellhub-io/shellhub/pkg/api/paginator" - "github.com/shellhub-io/shellhub/pkg/cache" + 
"github.com/shellhub-io/shellhub/pkg/api/query" "github.com/shellhub-io/shellhub/pkg/models" "github.com/stretchr/testify/assert" ) -func TestPublicKeyGet(t *testing.T) { +func TestPublicKeyResolve(t *testing.T) { type Expected struct { pubKey *models.PublicKey err error @@ -22,16 +19,18 @@ func TestPublicKeyGet(t *testing.T) { cases := []struct { description string - fingerprint string - tenant string + resolver store.PublicKeyResolver + value string + opts []store.QueryOption fixtures []string expected Expected }{ { description: "succeeds when public key is not found due to fingerprint", - fingerprint: "nonexistent", - tenant: "00000000-0000-4000-0000-000000000000", - fixtures: []string{fixtures.FixturePublicKeys}, + resolver: store.PublicKeyFingerprintResolver, + value: "nonexistent", + opts: []store.QueryOption{s.Options().InNamespace("00000000-0000-4000-0000-000000000000")}, + fixtures: []string{fixtureTags, fixturePublicKeys}, expected: Expected{ pubKey: nil, err: store.ErrNoDocuments, @@ -39,9 +38,10 @@ func TestPublicKeyGet(t *testing.T) { }, { description: "succeeds when public key is not found due to tenant", - fingerprint: "fingerprint", - tenant: "nonexistent", - fixtures: []string{fixtures.FixturePublicKeys}, + resolver: store.PublicKeyFingerprintResolver, + value: "fingerprint", + opts: []store.QueryOption{s.Options().InNamespace("nonexistent")}, + fixtures: []string{fixtureTags, fixturePublicKeys}, expected: Expected{ pubKey: nil, err: store.ErrNoDocuments, @@ -49,9 +49,10 @@ func TestPublicKeyGet(t *testing.T) { }, { description: "succeeds when public key is found", - fingerprint: "fingerprint", - tenant: "00000000-0000-4000-0000-000000000000", - fixtures: []string{fixtures.FixturePublicKeys}, + resolver: store.PublicKeyFingerprintResolver, + value: "fingerprint", + opts: []store.QueryOption{s.Options().InNamespace("00000000-0000-4000-0000-000000000000")}, + fixtures: []string{fixtureTags, fixturePublicKeys}, expected: Expected{ pubKey: 
&models.PublicKey{ Data: []byte("test"), @@ -62,7 +63,25 @@ func TestPublicKeyGet(t *testing.T) { Name: "public_key", Filter: models.PublicKeyFilter{ Hostname: ".*", - Tags: []string{"tag-1"}, + Taggable: models.Taggable{ + TagIDs: []string{"6791d3ae04ba86e6d7a0514d", "6791d3be5a201d874c4c2885"}, + Tags: []models.Tag{ + { + ID: "6791d3ae04ba86e6d7a0514d", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "production", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + { + ID: "6791d3be5a201d874c4c2885", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "development", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + }, + }, }, }, }, @@ -71,18 +90,16 @@ func TestPublicKeyGet(t *testing.T) { }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + ctx := context.Background() + + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) - pubKey, err := mongostore.PublicKeyGet(context.TODO(), tc.fingerprint, tc.tenant) + pubKey, err := s.PublicKeyResolve(ctx, tc.resolver, tc.value, tc.opts...) 
assert.Equal(t, tc.expected, Expected{pubKey: pubKey, err: err}) }) } @@ -97,11 +114,13 @@ func TestPublicKeyList(t *testing.T) { cases := []struct { description string + opts []store.QueryOption fixtures []string expected Expected }{ { description: "succeeds when public key list is empty", + opts: []store.QueryOption{s.Options().Paginate(&query.Paginator{Page: -1, PerPage: -1})}, fixtures: []string{}, expected: Expected{ pubKey: []models.PublicKey{}, @@ -111,7 +130,8 @@ func TestPublicKeyList(t *testing.T) { }, { description: "succeeds when public key list len is greater than 1", - fixtures: []string{fixtures.FixturePublicKeys}, + opts: []store.QueryOption{s.Options().Paginate(&query.Paginator{Page: -1, PerPage: -1})}, + fixtures: []string{fixtureTags, fixturePublicKeys}, expected: Expected{ pubKey: []models.PublicKey{ { @@ -123,7 +143,25 @@ func TestPublicKeyList(t *testing.T) { Name: "public_key", Filter: models.PublicKeyFilter{ Hostname: ".*", - Tags: []string{"tag-1"}, + Taggable: models.Taggable{ + TagIDs: []string{"6791d3ae04ba86e6d7a0514d", "6791d3be5a201d874c4c2885"}, + Tags: []models.Tag{ + { + ID: "6791d3ae04ba86e6d7a0514d", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "production", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + { + ID: "6791d3be5a201d874c4c2885", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "development", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + }, + }, }, }, }, @@ -134,29 +172,32 @@ func TestPublicKeyList(t *testing.T) { }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck 
+ ctx := context.Background() - pubKey, count, err := mongostore.PublicKeyList(context.TODO(), paginator.Query{Page: -1, PerPage: -1}) + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + pubKey, count, err := s.PublicKeyList(ctx, tc.opts...) assert.Equal(t, tc.expected, Expected{pubKey: pubKey, len: count, err: err}) }) } } func TestPublicKeyCreate(t *testing.T) { + type Expected struct { + fingerprint string + err error + } + cases := []struct { description string key *models.PublicKey fixtures []string - expected error + expected Expected }{ { description: "succeeds when data is valid", @@ -167,119 +208,89 @@ func TestPublicKeyCreate(t *testing.T) { PublicKeyFields: models.PublicKeyFields{Name: "public_key", Filter: models.PublicKeyFilter{Hostname: ".*"}}, }, fixtures: []string{}, - expected: nil, + expected: Expected{fingerprint: "fingerprint", err: nil}, }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + ctx := context.Background() - err := mongostore.PublicKeyCreate(context.TODO(), tc.key) - assert.Equal(t, tc.expected, err) + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + fingerprint, err := s.PublicKeyCreate(ctx, tc.key) + assert.Equal(t, tc.expected, Expected{fingerprint: fingerprint, err: err}) }) } } func TestPublicKeyUpdate(t *testing.T) { - type Expected struct { - pubKey *models.PublicKey - err error - } - cases := []struct { description string - fingerprint string - tenant string - key *models.PublicKeyUpdate + publicKey *models.PublicKey fixtures []string - expected Expected + expected error }{ { - description: "succeeds when public key is not found due 
to fingerprint", - fingerprint: "nonexistent", - tenant: "00000000-0000-4000-0000-000000000000", - key: &models.PublicKeyUpdate{ + description: "fails when public key is not found due to fingerprint", + publicKey: &models.PublicKey{ + Fingerprint: "nonexistent", + TenantID: "00000000-0000-4000-0000-000000000000", PublicKeyFields: models.PublicKeyFields{ Name: "edited_name", Filter: models.PublicKeyFilter{Hostname: ".*"}, }, }, - fixtures: []string{fixtures.FixturePublicKeys}, - expected: Expected{ - pubKey: nil, - err: store.ErrNoDocuments, - }, + fixtures: []string{fixturePublicKeys}, + expected: store.ErrNoDocuments, }, { - description: "succeeds when public key is not found due to tenant", - fingerprint: "fingerprint", - tenant: "nonexistent", - key: &models.PublicKeyUpdate{ + description: "fails when public key is not found due to tenant", + publicKey: &models.PublicKey{ + Fingerprint: "fingerprint", + TenantID: "nonexistent", PublicKeyFields: models.PublicKeyFields{ Name: "edited_name", Filter: models.PublicKeyFilter{Hostname: ".*"}, }, }, - fixtures: []string{fixtures.FixturePublicKeys}, - expected: Expected{ - pubKey: nil, - err: store.ErrNoDocuments, - }, + fixtures: []string{fixturePublicKeys}, + expected: store.ErrNoDocuments, }, { description: "succeeds when public key is found", - fingerprint: "fingerprint", - tenant: "00000000-0000-4000-0000-000000000000", - key: &models.PublicKeyUpdate{ + publicKey: &models.PublicKey{ + Fingerprint: "fingerprint", + TenantID: "00000000-0000-4000-0000-000000000000", PublicKeyFields: models.PublicKeyFields{ Name: "edited_key", Filter: models.PublicKeyFilter{ Hostname: ".*", - Tags: []string{"edited-tag"}, - }, - }, - }, - fixtures: []string{fixtures.FixturePublicKeys}, - expected: Expected{ - pubKey: &models.PublicKey{ - Data: []byte("test"), - CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - Fingerprint: "fingerprint", - TenantID: "00000000-0000-4000-0000-000000000000", - PublicKeyFields: 
models.PublicKeyFields{ - Name: "edited_key", - Filter: models.PublicKeyFilter{ - Hostname: ".*", - Tags: []string{"edited-tag"}, + Taggable: models.Taggable{ + TagIDs: []string{"6791d3ae04ba86e6d7a0514d", "6791d3be5a201d874c4c2885"}, }, }, }, - err: nil, }, + fixtures: []string{fixtureTags, fixturePublicKeys}, + expected: nil, }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + ctx := context.Background() - pubKey, err := mongostore.PublicKeyUpdate(context.TODO(), tc.fingerprint, tc.tenant, tc.key) - assert.Equal(t, tc.expected, Expected{pubKey: pubKey, err: err}) + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + err := s.PublicKeyUpdate(ctx, tc.publicKey) + assert.Equal(t, tc.expected, err) }) } } @@ -287,46 +298,49 @@ func TestPublicKeyUpdate(t *testing.T) { func TestPublicKeyDelete(t *testing.T) { cases := []struct { description string - fingerprint string - tenant string + publicKey *models.PublicKey fixtures []string expected error }{ { description: "fails when public key is not found due to fingerprint", - fingerprint: "nonexistent", - tenant: "00000000-0000-4000-0000-000000000000", - fixtures: []string{fixtures.FixturePublicKeys}, - expected: store.ErrNoDocuments, + publicKey: &models.PublicKey{ + Fingerprint: "nonexistent", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + fixtures: []string{fixturePublicKeys}, + expected: store.ErrNoDocuments, }, { description: "fails when public key is not found due to tenant", - fingerprint: "fingerprint", - tenant: "nonexistent", - fixtures: []string{fixtures.FixturePublicKeys}, - expected: store.ErrNoDocuments, + publicKey: &models.PublicKey{ + Fingerprint: "fingerprint", 
+ TenantID: "nonexistent", + }, + fixtures: []string{fixturePublicKeys}, + expected: store.ErrNoDocuments, }, { description: "succeeds when public key is found", - fingerprint: "fingerprint", - tenant: "00000000-0000-4000-0000-000000000000", - fixtures: []string{fixtures.FixturePublicKeys}, - expected: nil, + publicKey: &models.PublicKey{ + Fingerprint: "fingerprint", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + fixtures: []string{fixturePublicKeys}, + expected: nil, }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + ctx := context.Background() + + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) - err := mongostore.PublicKeyDelete(context.TODO(), tc.fingerprint, tc.tenant) + err := s.PublicKeyDelete(ctx, tc.publicKey) assert.Equal(t, tc.expected, err) }) } diff --git a/api/store/mongo/queries/queries.go b/api/store/mongo/queries/queries.go deleted file mode 100644 index 6e061020112..00000000000 --- a/api/store/mongo/queries/queries.go +++ /dev/null @@ -1,174 +0,0 @@ -package queries - -import ( - "errors" - "strconv" - - "github.com/shellhub-io/shellhub/pkg/api/order" - "github.com/shellhub-io/shellhub/pkg/api/paginator" - "github.com/shellhub-io/shellhub/pkg/models" - "go.mongodb.org/mongo-driver/bson" -) - -var ( - ErrFilterInvalid = errors.New("filter is invalid") - ErrFilterPropertyInvalid = errors.New("filter property is not valid") -) - -// BuildFilterQuery creates a MongoDB's filter query from models.Filter for filtering a fields in a database. 
-func BuildFilterQuery(filters []models.Filter) ([]bson.M, error) { - const ( - TypeProperty = "property" - TypeOperator = "operator" - ) - - properties := map[string]func(value interface{}) (bson.M, error){ - "contains": func(value interface{}) (bson.M, error) { - switch value.(type) { - case string: - return bson.M{"$regex": value, "$options": "i"}, nil - case []interface{}: - return bson.M{"$all": value}, nil - } - - return nil, ErrFilterPropertyInvalid - }, - "eq": func(value interface{}) (bson.M, error) { //nolint:unparam - return bson.M{"$eq": value}, nil - }, - "bool": func(value interface{}) (bson.M, error) { - switch v := value.(type) { - case int: - value = v != 0 - case string: - var err error - value, err = strconv.ParseBool(v) - if err != nil { - return nil, err - } - } - - return bson.M{"$eq": value}, nil - }, - "gt": func(value interface{}) (bson.M, error) { - switch v := value.(type) { - case int: - value = v - case string: - var err error - value, err = strconv.Atoi(v) - if err != nil { - return nil, err - } - } - - return bson.M{"$gt": value}, nil - }, - } - - operations := map[string]func() (string, error){ - "and": func() (string, error) { - return "$and", nil - }, - "or": func() (string, error) { - return "$or", nil - }, - } - - var queryFilter []bson.M - var queryMatcher []bson.M - - for _, filter := range filters { - switch filter.Type { - case TypeProperty: - // Converts a filter's param type to PropertyParams. - params, ok := filter.Params.(*models.PropertyParams) - if !ok { - return nil, ErrFilterInvalid - } - - // Trys to get a function that returns the query through operator. - fn, ok := properties[params.Operator] - if !ok { - // If the operator is not found, jump to next iteration. - continue - } - - // If the property is valid, get the data returned from function to use at the query. 
- property, err := fn(params.Value) - if err != nil { - return nil, err - } - - queryFilter = append(queryFilter, bson.M{params.Name: property}) - case TypeOperator: - // Converts a filter's param type to OperatorParams. - params, ok := filter.Params.(*models.OperatorParams) - if !ok { - return nil, ErrFilterInvalid - } - - // Trys to get a function that returns the query through param's name. - fn, ok := operations[params.Name] - if !ok { - // If the operation's name is not found, jump to next iteration. - continue - } - - operation, err := fn() - if err != nil { - return nil, err - } - - queryMatcher = append(queryMatcher, bson.M{ - "$match": bson.M{operation: queryFilter}, - }) - - queryFilter = nil - default: - return nil, ErrFilterInvalid - } - } - - if len(queryFilter) > 0 { - queryMatcher = []bson.M{ - { - "$match": bson.M{"$or": queryFilter}, - }, - } - } - - return queryMatcher, nil -} - -// BuildPaginationQuery creates a MongoDB's query from a paginator.Query with pagination to limit the number of returned results. 
-func BuildPaginationQuery(pagination paginator.Query) []bson.M { - if pagination.PerPage == -1 { - return nil - } - - return []bson.M{ - {"$skip": pagination.PerPage * (pagination.Page - 1)}, - {"$limit": pagination.PerPage}, - } -} - -func BuildOrderQuery(ordination order.Query, field string) []bson.M { - options := map[string]int{ - order.Asc: 1, - order.Desc: -1, - } - - selected, ok := options[ordination.OrderBy] - if !ok { - selected = 1 - } - - return []bson.M{ - { - "$sort": bson.M{ - field: selected, - }, - }, - } -} diff --git a/api/store/mongo/queries/queries_test.go b/api/store/mongo/queries/queries_test.go deleted file mode 100644 index be29a0ef8b0..00000000000 --- a/api/store/mongo/queries/queries_test.go +++ /dev/null @@ -1,223 +0,0 @@ -package queries - -import ( - "testing" - - "github.com/shellhub-io/shellhub/pkg/models" - "github.com/stretchr/testify/assert" - "go.mongodb.org/mongo-driver/bson" -) - -func TestBuildQuery(t *testing.T) { - type Expected struct { - data []bson.M - err error - } - cases := []struct { - description string - filters []models.Filter - expected Expected - }{ - { - description: "Fail when filter type is not valid", - filters: []models.Filter{ - { - Type: "invalid", - Params: &models.PropertyParams{ - Name: "test", - Operator: "valid", - Value: "test", - }, - }, - }, - expected: Expected{nil, ErrFilterInvalid}, - }, - { - description: "Fail when operator in property is invalid", - filters: []models.Filter{ - { - Type: "property", - Params: &models.PropertyParams{ - Name: "test", - Operator: "invalid", - Value: "valid", - }, - }, - }, - expected: Expected{ - data: nil, - err: nil, - }, - }, - { - description: "Success when one operator in property is valid and other is invalid", - filters: []models.Filter{ - { - Type: "property", - Params: &models.PropertyParams{ - Name: "test", - Operator: "invalid", - Value: "test", - }, - }, - { - Type: "property", - Params: &models.PropertyParams{ - Name: "test", - Operator: "eq", - 
Value: "valid", - }, - }, - }, - expected: Expected{ - data: []bson.M{{"$match": bson.M{"$or": []bson.M{{"test": bson.M{"$eq": "valid"}}}}}}, - err: nil, - }, - }, - { - description: "Success when operator in property is valid", - filters: []models.Filter{ - { - Type: "property", - Params: &models.PropertyParams{ - Name: "test", - Operator: "eq", - Value: "valid", - }, - }, - }, - expected: Expected{ - data: []bson.M{{"$match": bson.M{"$or": []bson.M{{"test": bson.M{"$eq": "valid"}}}}}}, - err: nil, - }, - }, - { - description: "Fail when operator in operator is invalid", - filters: []models.Filter{ - { - Type: "operator", - Params: &models.OperatorParams{ - Name: "invalid", - }, - }, - }, - expected: Expected{ - data: nil, - err: nil, - }, - }, - { - description: "Fail when operator in operator is valid and other invalid", - filters: []models.Filter{ - { - Type: "operator", - Params: &models.OperatorParams{ - Name: "and", - }, - }, - { - Type: "operator", - Params: &models.OperatorParams{ - Name: "invalid", - }, - }, - }, - expected: Expected{ - data: []bson.M{{"$match": bson.M{"$and": []bson.M(nil)}}}, - err: nil, - }, - }, - { - description: "Success when operator in operator is valid", - filters: []models.Filter{ - { - Type: "operator", - Params: &models.OperatorParams{ - Name: "and", - }, - }, - }, - expected: Expected{ - data: []bson.M{{"$match": bson.M{"$and": []bson.M(nil)}}}, - err: nil, - }, - }, - { - description: "Fail when property operator is invalid and operator is valid", - filters: []models.Filter{ - { - Type: "property", - Params: &models.PropertyParams{ - Name: "test", - Operator: "invalid", - Value: "test", - }, - }, - { - Type: "operator", - Params: &models.OperatorParams{ - Name: "and", - }, - }, - }, - expected: Expected{ - data: []bson.M{{"$match": bson.M{"$and": []bson.M(nil)}}}, - err: nil, - }, - }, - { - description: "Fail when property operator is valid and operator is invalid", - filters: []models.Filter{ - { - Type: "property", - 
Params: &models.PropertyParams{ - Name: "test", - Operator: "eq", - Value: "test", - }, - }, - { - Type: "operator", - Params: &models.OperatorParams{ - Name: "invalid", - }, - }, - }, - expected: Expected{ - data: []bson.M{{"$match": bson.M{"$or": []bson.M{{"test": bson.M{"$eq": "test"}}}}}}, - err: nil, - }, - }, - { - description: "Success when property and operator is valid", - filters: []models.Filter{ - { - Type: "property", - Params: &models.PropertyParams{ - Name: "test", - Operator: "eq", - Value: "test", - }, - }, - { - Type: "operator", - Params: &models.OperatorParams{ - Name: "and", - }, - }, - }, - expected: Expected{ - data: []bson.M{{"$match": bson.M{"$and": []bson.M{{"test": bson.M{"$eq": "test"}}}}}}, - err: nil, - }, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - query, err := BuildFilterQuery(tc.filters) - - assert.Equal(t, tc.expected, Expected{query, err}) - }) - } -} diff --git a/api/store/mongo/query-options.go b/api/store/mongo/query-options.go new file mode 100644 index 00000000000..765c72fa082 --- /dev/null +++ b/api/store/mongo/query-options.go @@ -0,0 +1,145 @@ +package mongo + +import ( + "context" + "errors" + + "github.com/shellhub-io/shellhub/api/store" + "github.com/shellhub-io/shellhub/api/store/mongo/internal" + "github.com/shellhub-io/shellhub/pkg/api/query" + "github.com/shellhub-io/shellhub/pkg/models" + "go.mongodb.org/mongo-driver/bson" +) + +func (s *Store) Options() store.QueryOptions { + return s.options +} + +func (*queryOptions) InNamespace(tenantID string) store.QueryOption { + return func(ctx context.Context) error { + query, ok := ctx.Value("query").(*[]bson.M) + if !ok { + return errors.New("query not found in context") + } + + *query = append(*query, bson.M{ + "$match": bson.M{ + "tenant_id": tenantID, + }, + }) + + return nil + } +} + +func (*queryOptions) WithDeviceStatus(status models.DeviceStatus) store.QueryOption { + return func(ctx context.Context) error { + query, ok 
:= ctx.Value("query").(*[]bson.M) + if !ok { + return errors.New("query not found in context") + } + + *query = append(*query, bson.M{ + "$match": bson.M{ + "status": status, + }, + }) + + return nil + } +} + +func (*queryOptions) Sort(sorter *query.Sorter) store.QueryOption { + return func(ctx context.Context) error { + if sorter == nil || sorter.By == "" { + return nil + } + + pipeline, ok := ctx.Value("query").(*[]bson.M) + if !ok { + return errors.New("query not found in context") + } + + options := map[string]int{query.OrderAsc: 1, query.OrderDesc: -1} + order, ok := options[sorter.Order] + if !ok { + order = -1 + } + + *pipeline = append(*pipeline, bson.M{"$sort": bson.M{sorter.By: order}}) + + return nil + } +} + +func (*queryOptions) Paginate(paginator *query.Paginator) store.QueryOption { + return func(ctx context.Context) error { + if paginator == nil || paginator.Page < 1 || paginator.PerPage < 1 { + return nil + } + + pipeline, ok := ctx.Value("query").(*[]bson.M) + if !ok { + return errors.New("query not found in context") + } + + *pipeline = append(*pipeline, []bson.M{{"$skip": paginator.PerPage * (paginator.Page - 1)}, {"$limit": paginator.PerPage}}...) 
+ + return nil + } +} + +func (*queryOptions) Match(filters *query.Filters) store.QueryOption { + return func(ctx context.Context) error { + if len(filters.Data) < 1 { + return nil + } + + pipeline, ok := ctx.Value("query").(*[]bson.M) + if !ok { + return errors.New("query not found in context") + } + + conditions, stages := make([]bson.M, 0), make([]bson.M, 0) + for _, data := range filters.Data { + switch data.Type { + case query.FilterTypeProperty: + param, ok := data.Params.(*query.FilterProperty) + if !ok { + return query.ErrFilterInvalid + } + + property, ok, err := internal.ParseFilterProperty(param) + switch { + case err != nil: + return query.ErrFilterPropertyInvalid + case ok: + conditions = append(conditions, bson.M{param.Name: property}) + } + case query.FilterTypeOperator: + param, ok := data.Params.(*query.FilterOperator) + if !ok { + return query.ErrFilterInvalid + } + + operator, ok := internal.ParseFilterOperator(param) + if !ok { + continue + } + + stages = append(stages, bson.M{"$match": bson.M{operator: conditions}}) + conditions = nil + default: + return query.ErrFilterInvalid + } + } + + if len(conditions) > 0 { + stages = append(stages, bson.M{"$match": bson.M{"$or": conditions}}) + } + + *pipeline = append(*pipeline, stages...) 
+ + return nil + } +} diff --git a/api/store/mongo/session.go b/api/store/mongo/session.go index 2734a78ab6b..3dc1c8d9173 100644 --- a/api/store/mongo/session.go +++ b/api/store/mongo/session.go @@ -2,53 +2,27 @@ package mongo import ( "context" - "time" - "github.com/shellhub-io/shellhub/api/pkg/gateway" "github.com/shellhub-io/shellhub/api/store" - "github.com/shellhub-io/shellhub/api/store/mongo/queries" - "github.com/shellhub-io/shellhub/pkg/api/paginator" "github.com/shellhub-io/shellhub/pkg/clock" "github.com/shellhub-io/shellhub/pkg/models" "go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/mongo" - "go.mongodb.org/mongo-driver/mongo/options" ) -func (s *Store) SessionList(ctx context.Context, pagination paginator.Query) ([]models.Session, int, error) { - query := []bson.M{ - { - "$match": bson.M{ - "uid": bson.M{ - "$ne": nil, - }, - }, - }, - } - - // Only match for the respective tenant if requested - if tenant := gateway.TenantFromContext(ctx); tenant != nil { - query = append(query, bson.M{ - "$match": bson.M{ - "tenant_id": tenant.ID, - }, - }) +func (s *Store) SessionList(ctx context.Context, opts ...store.QueryOption) ([]models.Session, int, error) { + query := []bson.M{{"$match": bson.M{"uid": bson.M{"$ne": nil}}}} + for _, opt := range opts { + if err := opt(context.WithValue(ctx, "query", &query)); err != nil { + return nil, 0, err + } } - queryCount := query - queryCount = append(queryCount, bson.M{"$count": "count"}) - count, err := AggregateCount(ctx, s.db.Collection("sessions"), queryCount) + count, err := CountAllMatchingDocuments(ctx, s.db.Collection("sessions"), query) if err != nil { return nil, 0, FromMongoError(err) } - query = append(query, bson.M{ - "$sort": bson.M{ - "started_at": -1, - }, - }) - - query = append(query, queries.BuildPaginationQuery(pagination)...) 
query = append(query, []bson.M{ { "$lookup": bson.M{ @@ -58,11 +32,48 @@ func (s *Store) SessionList(ctx context.Context, pagination paginator.Query) ([] "as": "active", }, }, + { + "$lookup": bson.M{ + "from": "sessions_events", + "let": bson.M{"sessionUID": "$uid"}, + "pipeline": []bson.M{ + { + "$match": bson.M{ + "$expr": bson.M{"$eq": []string{"$session", "$$sessionUID"}}, + }, + }, + { + "$group": bson.M{ + "_id": nil, + "types": bson.M{"$addToSet": "$type"}, + "seats": bson.M{"$addToSet": "$seat"}, + }, + }, + }, + "as": "eventData", + }, + }, { "$addFields": bson.M{ - "active": bson.M{"$anyElementTrue": []interface{}{"$active"}}, + "active": bson.M{"$anyElementTrue": []any{"$active"}}, + "events": bson.M{ + "$cond": bson.M{ + "if": bson.M{"$gt": []any{bson.M{"$size": "$eventData"}, 0}}, + "then": bson.M{ + "types": bson.M{"$arrayElemAt": []any{"$eventData.types", 0}}, + "seats": bson.M{"$arrayElemAt": []any{"$eventData.seats", 0}}, + }, + "else": bson.M{ + "types": []string{}, + "seats": []int{}, + }, + }, + }, }, }, + { + "$unset": "eventData", + }, }...) sessions := make([]models.Session, 0) @@ -70,6 +81,7 @@ func (s *Store) SessionList(ctx context.Context, pagination paginator.Query) ([] if err != nil { return sessions, count, FromMongoError(err) } + defer cursor.Close(ctx) for cursor.Next(ctx) { @@ -79,7 +91,10 @@ func (s *Store) SessionList(ctx context.Context, pagination paginator.Query) ([] return sessions, count, err } - device, err := s.DeviceGet(ctx, session.DeviceUID) + // WARNING: N+1 query problem - DeviceResolve makes a separate database call + // for each session in the result set. For large result sets, consider using + // a $lookup stage in the aggregation pipeline or batch-loading devices. 
+ device, err := s.DeviceResolve(ctx, store.DeviceUIDResolver, string(session.DeviceUID)) if err != nil { return sessions, count, err } @@ -91,7 +106,15 @@ func (s *Store) SessionList(ctx context.Context, pagination paginator.Query) ([] return sessions, count, err } -func (s *Store) SessionGet(ctx context.Context, uid models.UID) (*models.Session, error) { +func (s *Store) SessionResolve(ctx context.Context, resolver store.SessionResolver, value string, opts ...store.QueryOption) (*models.Session, error) { + var uid models.UID + switch resolver { + case store.SessionUIDResolver: + uid = models.UID(value) + default: + return nil, store.ErrNoDocuments + } + query := []bson.M{ { "$match": bson.M{"uid": uid}, @@ -105,36 +128,63 @@ func (s *Store) SessionGet(ctx context.Context, uid models.UID) (*models.Session }, }, { - "$addFields": bson.M{ - "active": bson.M{"$anyElementTrue": []interface{}{"$active"}}, + "$lookup": bson.M{ + "from": "sessions_events", + "let": bson.M{"sessionUID": "$uid"}, + "pipeline": []bson.M{ + { + "$match": bson.M{ + "$expr": bson.M{"$eq": []string{"$session", "$$sessionUID"}}, + }, + }, + { + "$group": bson.M{ + "_id": nil, + "types": bson.M{"$addToSet": "$type"}, + "seats": bson.M{"$addToSet": "$seat"}, + }, + }, + }, + "as": "eventData", }, }, - } - - // Only match for the respective tenant if requested - if tenant := gateway.TenantFromContext(ctx); tenant != nil { - query = append(query, bson.M{ - "$match": bson.M{ - "tenant_id": tenant.ID, + { + "$addFields": bson.M{ + "active": bson.M{"$anyElementTrue": []any{"$active"}}, + "events": bson.M{ + "$cond": bson.M{ + "if": bson.M{"$gt": []any{bson.M{"$size": "$eventData"}, 0}}, + "then": bson.M{ + "types": bson.M{"$arrayElemAt": []any{"$eventData.types", 0}}, + "seats": bson.M{"$arrayElemAt": []any{"$eventData.seats", 0}}, + }, + "else": bson.M{ + "types": []string{}, + "seats": []int{}, + }, + }, + }, }, - }) + }, + { + "$unset": "eventData", + }, } - session := new(models.Session) - cursor, 
err := s.db.Collection("sessions").Aggregate(ctx, query) if err != nil { return nil, FromMongoError(err) } + defer cursor.Close(ctx) cursor.Next(ctx) - err = cursor.Decode(&session) - if err != nil { + session := new(models.Session) + if err = cursor.Decode(&session); err != nil { return nil, FromMongoError(err) } - device, err := s.DeviceGet(ctx, session.DeviceUID) + device, err := s.DeviceResolve(ctx, store.DeviceUIDResolver, string(session.DeviceUID)) if err != nil { return nil, FromMongoError(err) } @@ -144,275 +194,170 @@ func (s *Store) SessionGet(ctx context.Context, uid models.UID) (*models.Session return session, nil } -func (s *Store) SessionSetAuthenticated(ctx context.Context, uid models.UID, authenticated bool) error { - session, err := s.db.Collection("sessions").UpdateOne(ctx, bson.M{"uid": uid}, bson.M{"$set": bson.M{"authenticated": authenticated}}) - if err != nil { - return FromMongoError(err) - } - - if session.MatchedCount < 1 { - return store.ErrNoDocuments - } - - return nil -} - -func (s *Store) SessionSetRecorded(ctx context.Context, uid models.UID, recorded bool) error { - session, err := s.db.Collection("sessions").UpdateOne(ctx, bson.M{"uid": uid}, bson.M{"$set": bson.M{"recorded": recorded}}) +func (s *Store) SessionUpdate(ctx context.Context, session *models.Session) error { + r, err := s.db.Collection("sessions").UpdateOne(ctx, bson.M{"uid": session.UID}, bson.M{"$set": session}) if err != nil { return FromMongoError(err) } - if session.MatchedCount < 1 { + if r.MatchedCount < 1 { return store.ErrNoDocuments } return nil } -func (s *Store) SessionCreate(ctx context.Context, session models.Session) (*models.Session, error) { +func (s *Store) SessionCreate(ctx context.Context, session models.Session) (string, error) { session.StartedAt = clock.Now() session.LastSeen = session.StartedAt session.Recorded = false - device, err := s.DeviceGet(ctx, session.DeviceUID) + device, err := s.DeviceResolve(ctx, store.DeviceUIDResolver, 
string(session.DeviceUID)) if err != nil { - return nil, FromMongoError(err) + return "", FromMongoError(err) } session.TenantID = device.TenantID if _, err := s.db.Collection("sessions").InsertOne(ctx, &session); err != nil { - return nil, FromMongoError(err) - } - - as := &models.ActiveSession{ - UID: models.UID(session.UID), - LastSeen: session.StartedAt, - TenantID: session.TenantID, - } - - if _, err := s.db.Collection("active_sessions").InsertOne(ctx, &as); err != nil { - return nil, FromMongoError(err) + return "", FromMongoError(err) } - return &session, nil + return session.UID, nil } -func (s *Store) SessionSetLastSeen(ctx context.Context, uid models.UID) error { - session := models.Session{} - - err := s.db.Collection("sessions").FindOne(ctx, bson.M{"uid": uid}).Decode(&session) - if err != nil { - return FromMongoError(err) - } - - if session.Closed { - return nil - } - - session.LastSeen = clock.Now() - - opts := options.Update().SetUpsert(true) - _, err = s.db.Collection("sessions").UpdateOne(ctx, bson.M{"uid": session.UID}, bson.M{"$set": session}, opts) +func (s *Store) SessionUpdateDeviceUID(ctx context.Context, oldUID models.UID, newUID models.UID) error { + session, err := s.db.Collection("sessions").UpdateMany(ctx, bson.M{"device_uid": oldUID}, bson.M{"$set": bson.M{"device_uid": newUID}}) if err != nil { return FromMongoError(err) } - if _, err := s.db.Collection("active_sessions").UpdateOne(ctx, bson.M{"uid": uid}, bson.M{"$set": bson.M{"last_seen": clock.Now()}}); err != nil { - return FromMongoError(err) + if session.MatchedCount < 1 { + return store.ErrNoDocuments } return nil } -// SessionDeleteActives sets a session's "closed" status to true and deletes all related active_sessions. 
-func (s *Store) SessionDeleteActives(ctx context.Context, uid models.UID) error { - mongoSession, err := s.db.Client().StartSession() - if err != nil { - return FromMongoError(err) +func (s *Store) ActiveSessionResolve(ctx context.Context, resolver store.SessionResolver, value string) (*models.ActiveSession, error) { + var uid models.UID + switch resolver { + case store.SessionUIDResolver: + uid = models.UID(value) + default: + return nil, store.ErrNoDocuments } - defer mongoSession.EndSession(ctx) - - _, err = mongoSession.WithTransaction(ctx, func(mongoctx mongo.SessionContext) (interface{}, error) { - session := new(models.Session) - - query := bson.M{"uid": uid} - update := bson.M{"$set": bson.M{"last_seen": clock.Now(), "closed": true}} - - if err := s.db.Collection("sessions").FindOneAndUpdate(ctx, query, update).Decode(&session); err != nil { - return nil, FromMongoError(err) - } - - _, err := s.db.Collection("active_sessions").DeleteMany(ctx, bson.M{"uid": session.UID}) + activeSession := new(models.ActiveSession) + if err := s.db.Collection("active_sessions").FindOne(ctx, bson.M{"uid": uid}).Decode(activeSession); err != nil { return nil, FromMongoError(err) - }) - - return err -} - -func (s *Store) SessionCreateRecordFrame(ctx context.Context, uid models.UID, recordSession *models.RecordedSession) error { - mongoSession, err := s.db.Client().StartSession() - if err != nil { - return FromMongoError(err) } - defer mongoSession.EndSession(ctx) - - _, err = mongoSession.WithTransaction(ctx, func(mongoctx mongo.SessionContext) (interface{}, error) { - session, err := s.db.Collection("sessions").UpdateOne(ctx, bson.M{"uid": uid}, bson.M{"$set": bson.M{"recorded": true}}) - if err != nil { - return nil, FromMongoError(err) - } - - if session.MatchedCount < 1 { - return nil, store.ErrNoDocuments - } - - if _, err := s.db.Collection("recorded_sessions").InsertOne(ctx, &recordSession); err != nil { - return nil, FromMongoError(err) - } - - return nil, err - }) - 
return err + return activeSession, nil } -func (s *Store) SessionUpdateDeviceUID(ctx context.Context, oldUID models.UID, newUID models.UID) error { - session, err := s.db.Collection("sessions").UpdateMany(ctx, bson.M{"device_uid": oldUID}, bson.M{"$set": bson.M{"device_uid": newUID}}) +func (s *Store) ActiveSessionCreate(ctx context.Context, session *models.Session) error { + _, err := s.db.Collection("active_sessions"). + InsertOne(ctx, &models.ActiveSession{UID: models.UID(session.UID), LastSeen: session.StartedAt, TenantID: session.TenantID}) if err != nil { return FromMongoError(err) } - if session.MatchedCount < 1 { - return store.ErrNoDocuments - } - return nil } -func (s *Store) SessionDeleteRecordFrame(ctx context.Context, uid models.UID) error { - session, err := s.db.Collection("recorded_sessions").DeleteMany(ctx, bson.M{"uid": uid}) +func (s *Store) ActiveSessionUpdate(ctx context.Context, activeSession *models.ActiveSession) error { + r, err := s.db.Collection("active_sessions").UpdateOne(ctx, bson.M{"uid": activeSession.UID}, bson.M{"$set": activeSession}) if err != nil { return FromMongoError(err) } - if session.DeletedCount < 1 { + if r.MatchedCount < 1 { return store.ErrNoDocuments } return nil } -// SessionDeleteRecordFrameByDate deletes recorded sessions and updates session records -// before the specified date. -// -// It takes a time 'lte', representing the maximum date. The method deletes all recorded sessions -// with a 'time' field less than or equal to 'lte' It also updates 'sessions' records by setting -// the 'recorded' field to false for sessions that started before 'lte' and are marked as recorded. -// -// The method returns the count of deleted sessions, the count of updated session records, -// and any encountered error during the operation. 
-func (s *Store) SessionDeleteRecordFrameByDate(ctx context.Context, lte time.Time) (deletedCount int64, updatedCount int64, err error) { +func (s *Store) ActiveSessionDelete(ctx context.Context, uid models.UID) error { mongoSession, err := s.db.Client().StartSession() if err != nil { - return deletedCount, updatedCount, FromMongoError(err) + return FromMongoError(err) } defer mongoSession.EndSession(ctx) - _, err = mongoSession.WithTransaction(ctx, func(mongoctx mongo.SessionContext) (interface{}, error) { - d, err := s.db.Collection("recorded_sessions").DeleteMany( - ctx, - bson.M{ - "time": bson.D{ - {Key: "$lte", Value: lte}, - }, - }, - ) + _, err = mongoSession.WithTransaction(ctx, func(_ mongo.SessionContext) (any, error) { + r, err := s.db.Collection("sessions").UpdateOne(ctx, bson.M{"uid": uid}, bson.M{"$set": bson.M{"last_seen": clock.Now(), "closed": true}}) if err != nil { - return nil, err + return nil, FromMongoError(err) } - u, err := s.db.Collection("sessions").UpdateMany( - ctx, - bson.M{ - "started_at": bson.D{ - {Key: "$lte", Value: lte}, - }, - "recorded": bson.M{ - "$eq": true, - }, - }, - bson.M{ - "$set": bson.M{ - "recorded": false, - }, - }, - ) - if err != nil { - return nil, err + if r.MatchedCount < 1 { + return nil, store.ErrNoDocuments } - deletedCount = d.DeletedCount - updatedCount = u.ModifiedCount + if _, err := s.db.Collection("active_sessions").DeleteMany(ctx, bson.M{"uid": uid}); err != nil { + return nil, FromMongoError(err) + } return nil, nil }) - return deletedCount, updatedCount, FromMongoError(err) + return err } -func (s *Store) SessionGetRecordFrame(ctx context.Context, uid models.UID) ([]models.RecordedSession, int, error) { - sessionRecord := make([]models.RecordedSession, 0) +func (s *Store) SessionEventsCreate(ctx context.Context, event *models.SessionEvent) error { + if _, err := s.db.Collection("sessions_events").InsertOne(ctx, event); err != nil { + return FromMongoError(err) + } + + return nil +} +func (s *Store) 
SessionEventsList(ctx context.Context, uid models.UID, seat int, event models.SessionEventType, opts ...store.QueryOption) ([]models.SessionEvent, int, error) { query := []bson.M{ { - "$match": bson.M{"uid": uid}, + "$match": bson.M{ + "session": uid, + "seat": seat, + "type": event, + }, + }, + { + "$sort": bson.M{ + "timestamp": 1, + }, }, } - // Only match for the respective tenant if requested - if tenant := gateway.TenantFromContext(ctx); tenant != nil { - query = append(query, bson.M{ - "$match": bson.M{ - "tenant_id": tenant.ID, - }, - }) + for _, opt := range opts { + if err := opt(context.WithValue(ctx, "query", &query)); err != nil { + return nil, 0, err + } } - cursor, err := s.db.Collection("recorded_sessions").Aggregate(ctx, query) + + count, err := CountAllMatchingDocuments(ctx, s.db.Collection("sessions_events"), query) if err != nil { - return sessionRecord, 0, err + return nil, 0, FromMongoError(err) } - defer cursor.Close(ctx) - - for cursor.Next(ctx) { - record := new(models.RecordedSession) - err = cursor.Decode(&record) - if err != nil { - return sessionRecord, 0, err - } - - sessionRecord = append(sessionRecord, *record) + cursor, err := s.db.Collection("sessions_events").Aggregate(ctx, query) + if err != nil { + return nil, 0, FromMongoError(err) } - if tenant := gateway.TenantFromContext(ctx); tenant != nil { - query = append(query, bson.M{ - "$match": bson.M{ - "tenant_id": tenant.ID, - }, - }) + events := make([]models.SessionEvent, 0) + if err := cursor.All(ctx, &events); err != nil { + return nil, 0, FromMongoError(err) } - query = append(query, bson.M{ - "$count": "count", - }) + return events, count, nil +} - count, err := AggregateCount(ctx, s.db.Collection("recorded_sessions"), query) - if err != nil { - return nil, 0, err +func (s *Store) SessionEventsDelete(ctx context.Context, uid models.UID, seat int, event models.SessionEventType) error { + if _, err := s.db.Collection("sessions_events").DeleteMany(ctx, bson.M{"session": uid, 
"seat": seat, "type": event}); err != nil { + return FromMongoError(err) } - return sessionRecord, count, nil + return nil } diff --git a/api/store/mongo/session_test.go b/api/store/mongo/session_test.go index e71dd500668..0b3c9f74ab1 100644 --- a/api/store/mongo/session_test.go +++ b/api/store/mongo/session_test.go @@ -1,4 +1,4 @@ -package mongo +package mongo_test import ( "context" @@ -6,11 +6,8 @@ import ( "testing" "time" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" - "github.com/shellhub-io/shellhub/api/pkg/fixtures" "github.com/shellhub-io/shellhub/api/store" - "github.com/shellhub-io/shellhub/pkg/api/paginator" - "github.com/shellhub-io/shellhub/pkg/cache" + "github.com/shellhub-io/shellhub/pkg/api/query" "github.com/shellhub-io/shellhub/pkg/models" "github.com/stretchr/testify/assert" ) @@ -24,19 +21,19 @@ func TestSessionList(t *testing.T) { cases := []struct { description string - page paginator.Query + opts []store.QueryOption fixtures []string expected Expected }{ { description: "succeeds when sessions are found", - page: paginator.Query{Page: -1, PerPage: -1}, + opts: []store.QueryOption{s.Options().Paginate(&query.Paginator{Page: -1, PerPage: -1})}, fixtures: []string{ - fixtures.FixtureNamespaces, - fixtures.FixtureDevices, - fixtures.FixtureConnectedDevices, - fixtures.FixtureSessions, - fixtures.FixtureActiveSessions, + fixtureNamespaces, + fixtureTags, + fixtureDevices, + fixtureSessions, + fixtureActiveSessions, }, expected: Expected{ s: []models.Session{ @@ -49,24 +46,40 @@ func TestSessionList(t *testing.T) { Username: "john_doe", IPAddress: "0.0.0.0", Device: &models.Device{ - CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", - Name: "device-3", - Identity: &models.DeviceIdentity{MAC: "mac-3"}, - Info: nil, - PublicKey: "", - TenantID: 
"00000000-0000-4000-0000-000000000000", - Online: true, - Namespace: "namespace-1", - Status: "accepted", - RemoteAddr: "", - Position: nil, - Tags: []string{"tag-1"}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, + CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + StatusUpdatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + LastSeen: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", + Name: "device-3", + Identity: &models.DeviceIdentity{MAC: "mac-3"}, + Info: nil, + PublicKey: "", + TenantID: "00000000-0000-4000-0000-000000000000", + Online: false, + Namespace: "namespace-1", + Status: "accepted", + RemoteAddr: "", + Position: nil, + Acceptable: false, + Taggable: models.Taggable{ + TagIDs: []string{"6791d3ae04ba86e6d7a0514d", "6791d3be5a201d874c4c2885"}, + Tags: []models.Tag{ + { + ID: "6791d3ae04ba86e6d7a0514d", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "production", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + { + ID: "6791d3be5a201d874c4c2885", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "development", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + }, + }, }, Active: true, Closed: true, @@ -75,6 +88,7 @@ func TestSessionList(t *testing.T) { Type: "shell", Term: "xterm", Position: models.SessionPosition{Longitude: 0, Latitude: 0}, + Events: models.SessionEvents{Types: []string{}, Seats: []int{}}, }, { StartedAt: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), @@ -85,24 +99,40 @@ func TestSessionList(t *testing.T) { Username: "john_doe", IPAddress: "0.0.0.0", Device: &models.Device{ - CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - UID: 
"2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", - Name: "device-3", - Identity: &models.DeviceIdentity{MAC: "mac-3"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: true, - Namespace: "namespace-1", - Status: "accepted", - RemoteAddr: "", - Position: nil, - Tags: []string{"tag-1"}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, + CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + StatusUpdatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + LastSeen: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", + Name: "device-3", + Identity: &models.DeviceIdentity{MAC: "mac-3"}, + Info: nil, + PublicKey: "", + TenantID: "00000000-0000-4000-0000-000000000000", + Online: false, + Namespace: "namespace-1", + Status: "accepted", + RemoteAddr: "", + Position: nil, + Acceptable: false, + Taggable: models.Taggable{ + TagIDs: []string{"6791d3ae04ba86e6d7a0514d", "6791d3be5a201d874c4c2885"}, + Tags: []models.Tag{ + { + ID: "6791d3ae04ba86e6d7a0514d", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "production", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + { + ID: "6791d3be5a201d874c4c2885", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "development", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + }, + }, }, Active: false, Closed: true, @@ -111,6 +141,7 @@ func TestSessionList(t *testing.T) { Type: "shell", Term: "xterm", Position: models.SessionPosition{Longitude: 45.6789, Latitude: -12.3456}, + Events: models.SessionEvents{Types: []string{}, Seats: []int{}}, }, { StartedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), @@ -121,24 +152,40 @@ func TestSessionList(t *testing.T) { Username: "john_doe", IPAddress: "0.0.0.0", Device: 
&models.Device{ - CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", - Name: "device-3", - Identity: &models.DeviceIdentity{MAC: "mac-3"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: true, - Namespace: "namespace-1", - Status: "accepted", - RemoteAddr: "", - Position: nil, - Tags: []string{"tag-1"}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, + CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + StatusUpdatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + LastSeen: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", + Name: "device-3", + Identity: &models.DeviceIdentity{MAC: "mac-3"}, + Info: nil, + PublicKey: "", + TenantID: "00000000-0000-4000-0000-000000000000", + Online: false, + Namespace: "namespace-1", + Status: "accepted", + RemoteAddr: "", + Position: nil, + Acceptable: false, + Taggable: models.Taggable{ + TagIDs: []string{"6791d3ae04ba86e6d7a0514d", "6791d3be5a201d874c4c2885"}, + Tags: []models.Tag{ + { + ID: "6791d3ae04ba86e6d7a0514d", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "production", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + { + ID: "6791d3be5a201d874c4c2885", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "development", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + }, + }, }, Active: false, Closed: true, @@ -147,6 +194,7 @@ func TestSessionList(t *testing.T) { Type: "exec", Term: "", Position: models.SessionPosition{Longitude: -78.9012, Latitude: 23.4567}, + Events: models.SessionEvents{Types: []string{}, 
Seats: []int{}}, }, { StartedAt: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), @@ -157,24 +205,40 @@ func TestSessionList(t *testing.T) { Username: "john_doe", IPAddress: "0.0.0.0", Device: &models.Device{ - CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", - Name: "device-3", - Identity: &models.DeviceIdentity{MAC: "mac-3"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: true, - Namespace: "namespace-1", - Status: "accepted", - RemoteAddr: "", - Position: nil, - Tags: []string{"tag-1"}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, + CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + StatusUpdatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + LastSeen: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", + Name: "device-3", + Identity: &models.DeviceIdentity{MAC: "mac-3"}, + Info: nil, + PublicKey: "", + TenantID: "00000000-0000-4000-0000-000000000000", + Online: false, + Namespace: "namespace-1", + Status: "accepted", + RemoteAddr: "", + Position: nil, + Acceptable: false, + Taggable: models.Taggable{ + TagIDs: []string{"6791d3ae04ba86e6d7a0514d", "6791d3be5a201d874c4c2885"}, + Tags: []models.Tag{ + { + ID: "6791d3ae04ba86e6d7a0514d", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "production", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + { + ID: "6791d3be5a201d874c4c2885", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "development", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + }, + }, }, Active: false, Closed: true, @@ -183,6 +247,7 @@ 
func TestSessionList(t *testing.T) { Type: "shell", Term: "xterm", Position: models.SessionPosition{Longitude: -56.7890, Latitude: 34.5678}, + Events: models.SessionEvents{Types: []string{}, Seats: []int{}}, }, }, count: 4, @@ -191,12 +256,6 @@ func TestSessionList(t *testing.T) { }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - // Due to the non-deterministic order of applying fixtures when dealing with multiple datasets, // we ensure that both the expected and result arrays are correctly sorted. sort := func(s []models.Session) { @@ -207,18 +266,24 @@ func TestSessionList(t *testing.T) { for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + ctx := context.Background() + + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + s, count, err := s.SessionList(ctx, tc.opts...) 
- s, count, err := mongostore.SessionList(context.TODO(), tc.page) sort(tc.expected.s) sort(s) + assert.Equal(t, tc.expected, Expected{s: s, count: count, err: err}) }) } } -func TestSessionGet(t *testing.T) { +func TestSessionResolve(t *testing.T) { type Expected struct { s *models.Session err error @@ -226,19 +291,21 @@ func TestSessionGet(t *testing.T) { cases := []struct { description string - UID models.UID + resolver store.SessionResolver + value string fixtures []string expected Expected }{ { description: "fails when session is not found", - UID: models.UID("nonexistent"), + resolver: store.SessionUIDResolver, + value: "nonexistent", fixtures: []string{ - fixtures.FixtureNamespaces, - fixtures.FixtureDevices, - fixtures.FixtureConnectedDevices, - fixtures.FixtureSessions, - fixtures.FixtureActiveSessions, + fixtureNamespaces, + fixtureTags, + fixtureDevices, + fixtureSessions, + fixtureActiveSessions, }, expected: Expected{ s: nil, @@ -247,13 +314,14 @@ func TestSessionGet(t *testing.T) { }, { description: "succeeds when session is found", - UID: models.UID("a3b0431f5df6a7827945d2e34872a5c781452bc36de42f8b1297fd9ecb012f68"), + resolver: store.SessionUIDResolver, + value: "a3b0431f5df6a7827945d2e34872a5c781452bc36de42f8b1297fd9ecb012f68", fixtures: []string{ - fixtures.FixtureNamespaces, - fixtures.FixtureDevices, - fixtures.FixtureConnectedDevices, - fixtures.FixtureSessions, - fixtures.FixtureActiveSessions, + fixtureNamespaces, + fixtureTags, + fixtureDevices, + fixtureSessions, + fixtureActiveSessions, }, expected: Expected{ s: &models.Session{ @@ -265,24 +333,40 @@ func TestSessionGet(t *testing.T) { Username: "john_doe", IPAddress: "0.0.0.0", Device: &models.Device{ - CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), - UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", - Name: "device-3", - Identity: 
&models.DeviceIdentity{MAC: "mac-3"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: true, - Namespace: "namespace-1", - Status: "accepted", - RemoteAddr: "", - Position: nil, - Tags: []string{"tag-1"}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, + CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + StatusUpdatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + LastSeen: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", + Name: "device-3", + Identity: &models.DeviceIdentity{MAC: "mac-3"}, + Info: nil, + PublicKey: "", + TenantID: "00000000-0000-4000-0000-000000000000", + Online: false, + Namespace: "namespace-1", + Status: "accepted", + RemoteAddr: "", + Position: nil, + Acceptable: false, + Taggable: models.Taggable{ + TagIDs: []string{"6791d3ae04ba86e6d7a0514d", "6791d3be5a201d874c4c2885"}, + Tags: []models.Tag{ + { + ID: "6791d3ae04ba86e6d7a0514d", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "production", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + { + ID: "6791d3be5a201d874c4c2885", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "development", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + }, + }, }, Active: true, Closed: true, @@ -291,24 +375,23 @@ func TestSessionGet(t *testing.T) { Type: "shell", Term: "xterm", Position: models.SessionPosition{Longitude: 0, Latitude: 0}, + Events: models.SessionEvents{Types: []string{}, Seats: []int{}}, }, err: nil, }, }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, 
fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + ctx := context.Background() + + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) - s, err := mongostore.SessionGet(context.TODO(), tc.UID) + s, err := s.SessionResolve(ctx, tc.resolver, tc.value) assert.Equal(t, tc.expected, Expected{s: s, err: err}) }) } @@ -323,7 +406,7 @@ func TestSessionCreate(t *testing.T) { }{ { description: "", - fixtures: []string{fixtures.FixtureDevices, fixtures.FixtureNamespaces}, + fixtures: []string{fixtureDevices, fixtureNamespaces}, session: models.Session{ Username: "username", UID: "uid", @@ -336,20 +419,18 @@ func TestSessionCreate(t *testing.T) { }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + ctx := context.Background() + + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) - session, err := mongostore.SessionCreate(context.TODO(), tc.session) + uid, err := s.SessionCreate(ctx, tc.session) assert.Equal(t, tc.expected, err) - assert.NotEmpty(t, session) + assert.NotEmpty(t, uid) }) } } @@ -366,156 +447,83 @@ func TestSessionUpdateDeviceUID(t *testing.T) { description: "fails when device is not found", oldUID: models.UID("nonexistent"), newUID: models.UID("uid"), - fixtures: []string{fixtures.FixtureSessions}, + fixtures: []string{fixtureSessions}, expected: store.ErrNoDocuments, }, { description: "succeeds when device is found", oldUID: models.UID("2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c"), newUID: models.UID("uid"), - fixtures: []string{fixtures.FixtureSessions}, + fixtures: []string{fixtureSessions}, expected: nil, }, } - db := 
dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + ctx := context.Background() - err := mongostore.SessionUpdateDeviceUID(context.TODO(), tc.oldUID, tc.newUID) - assert.Equal(t, tc.expected, err) - }) - } -} - -func TestSessionSetAuthenticated(t *testing.T) { - cases := []struct { - description string - UID models.UID - authenticate bool - fixtures []string - expected error - }{ - { - description: "fails when session is not found", - UID: models.UID("nonexistent"), - authenticate: false, - fixtures: []string{fixtures.FixtureSessions}, - expected: store.ErrNoDocuments, - }, - { - description: "succeeds when session is found", - UID: models.UID("a3b0431f5df6a7827945d2e34872a5c781452bc36de42f8b1297fd9ecb012f68"), - authenticate: false, - fixtures: []string{fixtures.FixtureSessions}, - expected: nil, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := mongostore.SessionSetAuthenticated(context.TODO(), tc.UID, tc.authenticate) + err := s.SessionUpdateDeviceUID(ctx, tc.oldUID, tc.newUID) assert.Equal(t, tc.expected, err) }) } } -func TestSessionSetRecorded(t *testing.T) { +// TestSessionUpdate exercises different update paths for the SessionUpdate method. 
+func TestSessionUpdate(t *testing.T) { cases := []struct { - description string - UID models.UID - authenticate bool - fixtures []string - expected error + description string + session *models.Session + fixtures []string + expected error }{ { - description: "fails when session is not found", - UID: models.UID("nonexistent"), - authenticate: false, - fixtures: []string{fixtures.FixtureSessions}, - expected: store.ErrNoDocuments, + description: "succeeds when session is found", + session: &models.Session{UID: "a3b0431f5df6a7827945d2e34872a5c781452bc36de42f8b1297fd9ecb012f68", Authenticated: true}, + fixtures: []string{fixtureSessions}, + expected: nil, }, { - description: "succeeds when session is found", - UID: models.UID("a3b0431f5df6a7827945d2e34872a5c781452bc36de42f8b1297fd9ecb012f68"), - authenticate: false, - fixtures: []string{fixtures.FixtureSessions}, - expected: nil, + description: "succeeds when setting Authenticated to true", + session: &models.Session{UID: "e7f3a56d8b9e1dc4c285c98c8ea9c33032a17bda5b6c6b05a6213c2a02f97824", Authenticated: true, StartedAt: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), TenantID: "00000000-0000-4000-0000-000000000000"}, + fixtures: []string{fixtureSessions}, + expected: nil, }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := mongostore.SessionSetAuthenticated(context.TODO(), tc.UID, tc.authenticate) - assert.Equal(t, tc.expected, err) - }) - } -} - -func TestSessionSetLastSeen(t *testing.T) { - cases := []struct { - description string - UID models.UID - fixtures []string - expected error - }{ { - description: "fails when session is not found", - UID: models.UID("nonexistent"), - fixtures: []string{fixtures.FixtureSessions}, - 
expected: store.ErrNoDocuments, + description: "succeeds when updating Type field", + session: &models.Session{UID: "fc2e1493d8b6a4c17bf6a2f7f9e55629e384b2d3a21e0c3d90f6e35b0c946178a", Type: "exec"}, + fixtures: []string{fixtureSessions}, + expected: nil, }, { - description: "succeeds when session is found", - UID: models.UID("a3b0431f5df6a7827945d2e34872a5c781452bc36de42f8b1297fd9ecb012f68"), - fixtures: []string{fixtures.FixtureSessions}, + description: "succeeds when updating Recorded flag", + session: &models.Session{UID: "bc3d75821a29cfe70bf7986f9ee5629e384b2d3a21e0c3d90f6e35b0c946178a", Recorded: true}, + fixtures: []string{fixtureSessions}, expected: nil, }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + ctx := context.Background() + + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) - err := mongostore.SessionSetLastSeen(context.TODO(), tc.UID) + err := s.SessionUpdate(ctx, tc.session) assert.Equal(t, tc.expected, err) }) } } -func TestSessionDeleteActives(t *testing.T) { +func TestActiveSessionDelete(t *testing.T) { cases := []struct { description string UID models.UID @@ -525,229 +533,124 @@ func TestSessionDeleteActives(t *testing.T) { { description: "fails when session is not found", UID: models.UID("nonexistent"), - fixtures: []string{fixtures.FixtureSessions}, + fixtures: []string{fixtureSessions}, expected: store.ErrNoDocuments, }, { description: "succeeds when session is found", UID: models.UID("a3b0431f5df6a7827945d2e34872a5c781452bc36de42f8b1297fd9ecb012f68"), - fixtures: []string{fixtures.FixtureSessions}, + fixtures: []string{fixtureSessions}, expected: nil, }, } - db := dbtest.DBServer{} - defer db.Stop() - - 
mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + ctx := context.Background() + + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) - err := mongostore.SessionDeleteActives(context.TODO(), tc.UID) + err := s.ActiveSessionDelete(ctx, tc.UID) assert.Equal(t, tc.expected, err) }) } } -func TestSessionGetRecordFrame(t *testing.T) { +func TestActiveSessionResolve(t *testing.T) { type Expected struct { - r []models.RecordedSession - count int - err error + activeSession *models.ActiveSession + err error } cases := []struct { description string - UID models.UID + resolver store.SessionResolver + value string fixtures []string expected Expected }{ { - description: "succeeds", - UID: models.UID("e7f3a56d8b9e1dc4c285c98c8ea9c33032a17bda5b6c6b05a6213c2a02f97824"), - fixtures: []string{fixtures.FixtureSessions, fixtures.FixtureRecordedSessions}, + description: "fails when active session is not found", + resolver: store.SessionUIDResolver, + value: "nonexistent", + fixtures: []string{fixtureActiveSessions}, expected: Expected{ - r: []models.RecordedSession{ - { - Time: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), - UID: "e7f3a56d8b9e1dc4c285c98c8ea9c33032a17bda5b6c6b05a6213c2a02f97824", - Message: "message", - TenantID: "00000000-0000-4000-0000-000000000000", - Width: 0, - Height: 0, - }, + activeSession: nil, + err: store.ErrNoDocuments, + }, + }, + { + description: "succeeds when active session is found", + resolver: store.SessionUIDResolver, + value: "a3b0431f5df6a7827945d2e34872a5c781452bc36de42f8b1297fd9ecb012f68", + fixtures: []string{fixtureActiveSessions}, + expected: Expected{ + activeSession: &models.ActiveSession{ + UID: 
"a3b0431f5df6a7827945d2e34872a5c781452bc36de42f8b1297fd9ecb012f68", + LastSeen: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + TenantID: "00000000-0000-4000-0000-000000000000", }, - count: 1, - err: nil, + err: nil, }, }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + ctx := context.Background() + + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) - r, count, err := mongostore.SessionGetRecordFrame(context.TODO(), tc.UID) - assert.Equal(t, tc.expected, Expected{r: r, count: count, err: err}) + activeSession, err := s.ActiveSessionResolve(ctx, tc.resolver, tc.value) + assert.Equal(t, tc.expected, Expected{activeSession: activeSession, err: err}) }) } } -func TestSessionCreateRecordFrame(t *testing.T) { +func TestActiveSessionUpdate(t *testing.T) { cases := []struct { - description string - UID models.UID - record *models.RecordedSession - fixtures []string - expected error + description string + activeSession *models.ActiveSession + fixtures []string + expected error }{ { - description: "fails when session is not found", - UID: models.UID("nonexistent"), - record: &models.RecordedSession{ - UID: models.UID("nonexistent"), - Message: "message", - TenantID: "00000000-0000-4000-0000-000000000000", - Time: time.Now(), - Width: 0, - Height: 0, + description: "fails when active session is not found", + activeSession: &models.ActiveSession{ + UID: "nonexistent", + LastSeen: time.Now(), }, - fixtures: []string{fixtures.FixtureSessions}, + fixtures: []string{fixtureActiveSessions}, expected: store.ErrNoDocuments, }, { - description: "succeeds when session is found", - UID: 
models.UID("a3b0431f5df6a7827945d2e34872a5c781452bc36de42f8b1297fd9ecb012f68"), - record: &models.RecordedSession{ - UID: models.UID("a3b0431f5df6a7827945d2e34872a5c781452bc36de42f8b1297fd9ecb012f68"), - Message: "message", - TenantID: "00000000-0000-4000-0000-000000000000", - Time: time.Now(), - Width: 0, - Height: 0, + description: "succeeds when active session is found", + activeSession: &models.ActiveSession{ + UID: "a3b0431f5df6a7827945d2e34872a5c781452bc36de42f8b1297fd9ecb012f68", + LastSeen: time.Date(2023, 2, 1, 12, 0, 0, 0, time.UTC), }, - fixtures: []string{fixtures.FixtureSessions}, + fixtures: []string{fixtureActiveSessions}, expected: nil, }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + ctx := context.Background() - err := mongostore.SessionCreateRecordFrame(context.TODO(), tc.UID, tc.record) - assert.Equal(t, tc.expected, err) - }) - } -} + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) -func TestSessionDeleteRecordFrame(t *testing.T) { - cases := []struct { - description string - UID models.UID - fixtures []string - expected error - }{ - { - description: "fails when record frame is not found", - UID: models.UID("nonexistent"), - fixtures: []string{fixtures.FixtureSessions, fixtures.FixtureRecordedSessions}, - expected: store.ErrNoDocuments, - }, - { - description: "succeeds when record frame is found", - UID: models.UID("e7f3a56d8b9e1dc4c285c98c8ea9c33032a17bda5b6c6b05a6213c2a02f97824"), - fixtures: []string{fixtures.FixtureSessions, fixtures.FixtureRecordedSessions}, - expected: nil, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), 
cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := mongostore.SessionDeleteRecordFrame(context.TODO(), tc.UID) + err := s.ActiveSessionUpdate(ctx, tc.activeSession) assert.Equal(t, tc.expected, err) }) } } - -func TestSessionDeleteRecordFrameByDate(t *testing.T) { - type Expected struct { - deletedCount int64 - updatedCount int64 - err error - } - - cases := []struct { - description string - lte time.Time - fixtures []string - expected Expected - }{ - { - description: "succeeds when there are no sessions to update or delete", - lte: time.Date(2023, time.January, 30, 12, 00, 0, 0, time.UTC), - fixtures: []string{}, - expected: Expected{ - deletedCount: 0, - updatedCount: 0, - err: nil, - }, - }, - { - description: "succeeds to delete and update recorded sessions before specified date", - lte: time.Date(2023, time.January, 30, 12, 00, 0, 0, time.UTC), - fixtures: []string{ - fixtures.FixtureSessions, - fixtures.FixtureRecordedSessions, - }, - expected: Expected{ - deletedCount: 2, - updatedCount: 2, - err: nil, - }, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint:errcheck - - deletedCount, updatedCount, err := mongostore.SessionDeleteRecordFrameByDate(context.TODO(), tc.lte) - assert.Equal(t, tc.expected, Expected{deletedCount, updatedCount, err}) - }) - } -} diff --git a/api/store/mongo/stats.go b/api/store/mongo/stats.go index 4f6ad330433..3aabb8efc7c 100644 --- a/api/store/mongo/stats.go +++ b/api/store/mongo/stats.go @@ -2,134 +2,101 @@ package mongo import ( "context" + "time" - 
"github.com/shellhub-io/shellhub/api/pkg/gateway" "github.com/shellhub-io/shellhub/pkg/models" "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" ) -func (s *Store) GetStats(ctx context.Context) (*models.Stats, error) { - query := []bson.M{ - {"$group": bson.M{"_id": bson.M{"uid": "$uid"}, "count": bson.M{"$sum": 1}}}, - {"$group": bson.M{"_id": bson.M{"uid": "$uid"}, "count": bson.M{"$sum": 1}}}, - } - - // Only match for the respective tenant if requested - if tenant := gateway.TenantFromContext(ctx); tenant != nil { - query = append([]bson.M{{ - "$match": bson.M{ - "tenant_id": tenant.ID, - }, - }}, query...) - } - - query = append([]bson.M{{ - "$match": bson.M{ - "status": "accepted", - }, - }}, query...) - - onlineDevices, err := AggregateCount(ctx, s.db.Collection("connected_devices"), query) +func (s *Store) GetStats(ctx context.Context, tenantID string) (*models.Stats, error) { + onlineDevicesQuery := buildOnlineDevicesQuery(tenantID) + onlineDevices, err := CountAllMatchingDocuments(ctx, s.db.Collection("devices"), onlineDevicesQuery) if err != nil { return nil, err } - query = []bson.M{ - {"$count": "count"}, + registeredDevicesQuery := buildRegisteredDevicesQuery(tenantID) + registeredDevices, err := CountAllMatchingDocuments(ctx, s.db.Collection("devices"), registeredDevicesQuery) + if err != nil { + return nil, err } - // Only match for the respective tenant if requested - if tenant := gateway.TenantFromContext(ctx); tenant != nil { - query = append([]bson.M{{ - "$match": bson.M{ - "tenant_id": tenant.ID, - }, - }}, query...) + pendingDevicesQuery := buildPendingDevicesQuery(tenantID) + pendingDevices, err := CountAllMatchingDocuments(ctx, s.db.Collection("devices"), pendingDevicesQuery) + if err != nil { + return nil, err } - query = append([]bson.M{{ - "$match": bson.M{ - "status": "accepted", - }, - }}, query...) 
- registeredDevices, err := AggregateCount(ctx, s.db.Collection("devices"), query) + rejectedDevicesQuery := buildRejectedDevicesQuery(tenantID) + rejectedDevices, err := CountAllMatchingDocuments(ctx, s.db.Collection("devices"), rejectedDevicesQuery) if err != nil { return nil, err } - query = []bson.M{ - {"$count": "count"}, + activeSessionsQuery := buildActiveSessionsQuery(tenantID) + activeSessions, err := CountAllMatchingDocuments(ctx, s.db.Collection("active_sessions"), activeSessionsQuery) + if err != nil { + return nil, err } - // Only match for the respective tenant if requested - if tenant := gateway.TenantFromContext(ctx); tenant != nil { - query = append([]bson.M{{ - "$match": bson.M{ - "tenant_id": tenant.ID, - }, - }}, query...) + stats := &models.Stats{ + RegisteredDevices: registeredDevices, + OnlineDevices: onlineDevices, + PendingDevices: pendingDevices, + RejectedDevices: rejectedDevices, + ActiveSessions: activeSessions, } - query = append([]bson.M{{ - "$match": bson.M{ - "status": "pending", - }, - }}, query...) + return stats, nil +} - pendingDevices, err := AggregateCount(ctx, s.db.Collection("devices"), query) - if err != nil { - return nil, err +func buildOnlineDevicesQuery(tenantID string) []bson.M { + match := bson.M{ + "disconnected_at": nil, + "last_seen": bson.M{"$gt": primitive.NewDateTimeFromTime(time.Now().Add(-2 * time.Minute))}, + "status": models.DeviceStatusAccepted, } - query = []bson.M{ - {"$count": "count"}, + if tenantID != "" { + match["tenant_id"] = tenantID } - // Only match for the respective tenant if requested - if tenant := gateway.TenantFromContext(ctx); tenant != nil { - query = append([]bson.M{{ - "$match": bson.M{ - "tenant_id": tenant.ID, - }, - }}, query...) 
+ return []bson.M{{"$match": match}} +} + +func buildRegisteredDevicesQuery(tenantID string) []bson.M { + match := bson.M{"status": models.DeviceStatusAccepted} + if tenantID != "" { + match["tenant_id"] = tenantID } - query = append([]bson.M{{ - "$match": bson.M{ - "status": "rejected", - }, - }}, query...) + return []bson.M{{"$match": match}} +} - rejectedDevices, err := AggregateCount(ctx, s.db.Collection("devices"), query) - if err != nil { - return nil, err +func buildPendingDevicesQuery(tenantID string) []bson.M { + match := bson.M{"status": models.DeviceStatusPending} + if tenantID != "" { + match["tenant_id"] = tenantID } - query = []bson.M{} + return []bson.M{{"$match": match}} +} - // Only match for the respective tenant if requested - if tenant := gateway.TenantFromContext(ctx); tenant != nil { - query = append(query, bson.M{ - "$match": bson.M{ - "tenant_id": tenant.ID, - }, - }) +func buildRejectedDevicesQuery(tenantID string) []bson.M { + match := bson.M{"status": models.DeviceStatusRejected} + if tenantID != "" { + match["tenant_id"] = tenantID } - query = append(query, bson.M{ - "$count": "count", - }) + return []bson.M{{"$match": match}} +} - activeSessions, err := AggregateCount(ctx, s.db.Collection("active_sessions"), query) - if err != nil { - return nil, err +func buildActiveSessionsQuery(tenantID string) []bson.M { + match := bson.M{} + if tenantID != "" { + match["tenant_id"] = tenantID } - return &models.Stats{ - RegisteredDevices: registeredDevices, - OnlineDevices: onlineDevices, - PendingDevices: pendingDevices, - RejectedDevices: rejectedDevices, - ActiveSessions: activeSessions, - }, nil + return []bson.M{{"$match": match}} } diff --git a/api/store/mongo/stats_test.go b/api/store/mongo/stats_test.go index a4283e5377b..0932b70abcd 100644 --- a/api/store/mongo/stats_test.go +++ b/api/store/mongo/stats_test.go @@ -1,12 +1,9 @@ -package mongo +package mongo_test import ( "context" "testing" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" 
- "github.com/shellhub-io/shellhub/api/pkg/fixtures" - "github.com/shellhub-io/shellhub/pkg/cache" "github.com/shellhub-io/shellhub/pkg/models" "github.com/stretchr/testify/assert" ) @@ -20,22 +17,32 @@ func TestGetStats(t *testing.T) { cases := []struct { description string fixtures []string + tenantID string expected Expected }{ { - description: "succeeds", - fixtures: []string{ - fixtures.FixtureUsers, - fixtures.FixtureNamespaces, - fixtures.FixtureSessions, - fixtures.FixtureActiveSessions, - fixtures.FixtureDevices, - fixtures.FixtureConnectedDevices, + description: "succeeds without tenantID", + fixtures: []string{fixtureUsers, fixtureNamespaces, fixtureSessions, fixtureActiveSessions, fixtureDevices}, + tenantID: "", + expected: Expected{ + stats: &models.Stats{ + RegisteredDevices: 3, + OnlineDevices: 0, + ActiveSessions: 1, + PendingDevices: 1, + RejectedDevices: 0, + }, + err: nil, }, + }, + { + description: "succeeds with specific tenantID", + fixtures: []string{fixtureUsers, fixtureNamespaces, fixtureSessions, fixtureActiveSessions, fixtureDevices}, + tenantID: "00000000-0000-4000-0000-000000000000", expected: Expected{ stats: &models.Stats{ RegisteredDevices: 3, - OnlineDevices: 1, + OnlineDevices: 0, ActiveSessions: 1, PendingDevices: 1, RejectedDevices: 0, @@ -43,20 +50,33 @@ func TestGetStats(t *testing.T) { err: nil, }, }, + { + description: "succeeds with non-existent tenantID", + fixtures: []string{fixtureUsers, fixtureNamespaces, fixtureSessions, fixtureActiveSessions, fixtureDevices}, + tenantID: "99999999-9999-4999-9999-999999999999", + expected: Expected{ + stats: &models.Stats{ + RegisteredDevices: 0, + OnlineDevices: 0, + ActiveSessions: 0, + PendingDevices: 0, + RejectedDevices: 0, + }, + err: nil, + }, + }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - 
assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + ctx := context.Background() + + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) - stats, err := mongostore.GetStats(context.TODO()) + stats, err := s.GetStats(ctx, tc.tenantID) assert.Equal(t, tc.expected, Expected{stats: stats, err: err}) }) } diff --git a/api/store/mongo/store.go b/api/store/mongo/store.go index ac6876e5cd8..b7d30e51863 100644 --- a/api/store/mongo/store.go +++ b/api/store/mongo/store.go @@ -5,9 +5,10 @@ import ( "errors" "github.com/shellhub-io/shellhub/api/store" + "github.com/shellhub-io/shellhub/api/store/mongo/options" "github.com/shellhub-io/shellhub/pkg/cache" "go.mongodb.org/mongo-driver/mongo" - "go.mongodb.org/mongo-driver/mongo/options" + mongooptions "go.mongodb.org/mongo-driver/mongo/options" "go.mongodb.org/mongo-driver/x/mongo/driver/connstring" ) @@ -16,55 +17,55 @@ var ( ErrNamespaceDuplicatedMember = errors.New("this member is already in this namespace") ErrNamespaceMemberNotFound = errors.New("this member does not exist in this namespace") ErrUserNotFound = errors.New("user not found") + ErrStoreParseURI = errors.New("fail to parse the Mongo URI") + ErrStoreConnect = errors.New("fail to connect to the database on Mongo URI") + ErrStorePing = errors.New("fail to ping the Mongo database") + ErrStoreApplyMigration = errors.New("fail to apply Mongo migrations") ) -type Store struct { - db *mongo.Database - cache cache.Cache -} +type queryOptions struct{} -var _ store.Store = (*Store)(nil) - -func NewStore(db *mongo.Database, cache cache.Cache) *Store { - return &Store{db: db, cache: cache} +type Store struct { + db *mongo.Database + options *queryOptions + cache cache.Cache } -func (s *Store) Database() *mongo.Database { +func (s *Store) GetDB() *mongo.Database { return s.db } -func (s *Store) Cache() cache.Cache { - return s.cache -} +func Connect(ctx context.Context, uri 
string) (*mongo.Client, *mongo.Database, error) { + client, err := mongo.Connect(ctx, mongooptions.Client().ApplyURI(uri)) + if err != nil { + return nil, nil, errors.Join(ErrStoreConnect, err) + } -var ( - ErrStoreParseURI = errors.New("fail to parse the Mongo URI") - ErrStoreConnect = errors.New("fail to connect to the database on Mongo URI") - ErrStorePing = errors.New("fail to ping the Mongo database") - ErrStoreApplyMigration = errors.New("fail to apply Mongo migrations") -) + if err := client.Ping(ctx, nil); err != nil { + return nil, nil, errors.Join(ErrStorePing, err) + } -func NewStoreMongo(ctx context.Context, cache cache.Cache, uri string) (store.Store, error) { connStr, err := connstring.ParseAndValidate(uri) if err != nil { - return nil, errors.Join(ErrStoreParseURI, err) + return nil, nil, errors.Join(ErrStoreParseURI, err) } - clientOptions := options.Client().ApplyURI(uri) - client, err := mongo.Connect(ctx, clientOptions) - if err != nil { - return nil, errors.Join(ErrStoreConnect, err) - } + return client, client.Database(connStr.Database), nil +} - if err = client.Ping(ctx, nil); err != nil { - return nil, errors.Join(ErrStorePing, err) +func NewStore(ctx context.Context, uri string, cache cache.Cache, opts ...options.DatabaseOpt) (store.Store, error) { + _, db, err := Connect(ctx, uri) + if err != nil { + return nil, err } - db := client.Database(connStr.Database) + store := &Store{db: db, cache: cache, options: &queryOptions{}} - if err := ApplyMigrations(db); err != nil { - return nil, errors.Join(ErrStoreApplyMigration, err) + for _, opt := range opts { + if err := opt(ctx, store.db); err != nil { + return nil, err + } } - return &Store{db: db, cache: cache}, nil + return store, nil } diff --git a/api/store/mongo/store_test.go b/api/store/mongo/store_test.go new file mode 100644 index 00000000000..04aceeabe3b --- /dev/null +++ b/api/store/mongo/store_test.go @@ -0,0 +1,146 @@ +package mongo_test + +import ( + "context" + "os" + 
"path/filepath" + "runtime" + "testing" + + "github.com/shellhub-io/mongotest" + "github.com/shellhub-io/shellhub/api/pkg/dbtest" + "github.com/shellhub-io/shellhub/api/store" + "github.com/shellhub-io/shellhub/api/store/mongo" + "github.com/shellhub-io/shellhub/pkg/cache" + log "github.com/sirupsen/logrus" + "go.mongodb.org/mongo-driver/bson/primitive" + mongodb "go.mongodb.org/mongo-driver/mongo" +) + +var ( + srv = &dbtest.Server{} + db *mongodb.Database + s store.Store +) + +const ( + fixtureAPIKeys = "api-key" // Check "store.mongo.fixtures.api-keys" for fixture info + fixtureDevices = "devices" // Check "store.mongo.fixtures.devices" for fixture info + fixtureDevicesWithRemoved = "devices_with_removed" // Check "store.mongo.fixtures.devices_with_removed" for fixture info + fixtureSessions = "sessions" // Check "store.mongo.fixtures.sessions" for fixture info + fixtureActiveSessions = "active_sessions" // Check "store.mongo.fixtures.active_sessions" for fixture info + fixtureFirewallRules = "firewall_rules" // Check "store.mongo.fixtures.firewall_rules" for fixture info + fixturePublicKeys = "public_keys" // Check "store.mongo.fixtures.public_keys" for fixture info + fixturePrivateKeys = "private_keys" // Check "store.mongo.fixtures.private_keys" for fixture info + fixtureUsers = "users" // Check "store.mongo.fixtures.users" for fixture iefo + fixtureNamespaces = "namespaces" // Check "store.mongo.fixtures.namespaces" for fixture info + fixtureRecoveryTokens = "recovery_tokens" // Check "store.mongo.fixtures.recovery_tokens" for fixture info + fixtureTags = "tags" // Check "store.mongo.fixtures.tags" for fixture info + fixtureUserInvitations = "user_invitations" // Check "store.mongo.fixtures.user_invitations" for fixture info + fixtureMembershipInvitations = "membership_invitations" // Check "store.mongo.fixtures.membership_invitations" for fixture info +) + +func TestMain(m *testing.M) { + log.Info("Starting store tests") + + ctx := context.Background() + + 
srv.Container.Database = "test" + _, file, _, _ := runtime.Caller(0) + srv.Fixtures.Root = filepath.Join(filepath.Dir(file), "fixtures") + srv.Fixtures.PreInsertFuncs = []mongotest.PreInsertFunc{ + mongotest.SimpleConvertObjID("users", "_id"), + mongotest.SimpleConvertTime("users", "created_at"), + mongotest.SimpleConvertTime("users", "last_login"), + mongotest.SimpleConvertObjID("user_invitations", "_id"), + mongotest.SimpleConvertTime("user_invitations", "created_at"), + mongotest.SimpleConvertTime("user_invitations", "updated_at"), + mongotest.SimpleConvertObjID("membership_invitations", "_id"), + mongotest.SimpleConvertObjID("membership_invitations", "user_id"), + mongotest.SimpleConvertObjID("membership_invitations", "invited_by"), + mongotest.SimpleConvertTime("membership_invitations", "created_at"), + mongotest.SimpleConvertTime("membership_invitations", "updated_at"), + mongotest.SimpleConvertTime("membership_invitations", "status_updated_at"), + mongotest.SimpleConvertTime("membership_invitations", "expires_at"), + mongotest.SimpleConvertObjID("public_keys", "_id"), + mongotest.SimpleConvertBytes("public_keys", "data"), + mongotest.SimpleConvertTime("public_keys", "created_at"), + SimpleConvertArrayObjID("public_keys", "filter", "tag_ids"), + mongotest.SimpleConvertObjID("private_keys", "_id"), + mongotest.SimpleConvertBytes("private_keys", "data"), + mongotest.SimpleConvertTime("private_keys", "created_at"), + mongotest.SimpleConvertObjID("namespaces", "_id"), + mongotest.SimpleConvertTime("namespaces", "created_at"), + mongotest.SimpleConvertObjID("devices", "_id"), + mongotest.SimpleConvertTime("devices", "created_at"), + mongotest.SimpleConvertTime("devices", "last_seen"), + mongotest.SimpleConvertTime("devices", "status_updated_at"), + SimpleConvertArrayObjID("devices", "tag_ids"), + mongotest.SimpleConvertObjID("firewall_rules", "_id"), + mongotest.SimpleConvertObjID("sessions", "_id"), + mongotest.SimpleConvertTime("sessions", "started_at"), + 
mongotest.SimpleConvertTime("sessions", "last_seen"), + mongotest.SimpleConvertObjID("active_sessions", "_id"), + mongotest.SimpleConvertTime("active_sessions", "last_seen"), + mongotest.SimpleConvertObjID("tags", "_id"), + } + + if err := srv.Up(ctx); err != nil { + log.WithError(err).Error("Failed to UP the mongodb container") + os.Exit(1) + } + + log.Info("Connecting to ", srv.Container.ConnectionString) + + var err error + + s, err = mongo.NewStore(ctx, srv.Container.ConnectionString+"/"+srv.Container.Database, cache.NewNullCache()) + if err != nil { + log.WithError(err).Error("Failed to create the mongodb store") + os.Exit(1) + } + + store := s.(*mongo.Store) + db = store.GetDB() + + code := m.Run() + + log.Info("Stopping store tests") + if err := srv.Down(ctx); err != nil { + log.WithError(err).Error("Failed to DOWN the mongodb container") + os.Exit(1) + } + + os.Exit(code) +} + +func SimpleConvertArrayObjID(collectionName string, fieldPath ...string) mongotest.PreInsertFunc { + return func(collName string, doc mongotest.DocData) (mongotest.DocData, error) { + if collName != collectionName || len(fieldPath) == 0 { + return doc, nil + } + + convertNestedField(doc, fieldPath) + + return doc, nil + } +} + +func convertNestedField(current map[string]any, parts []string) { + switch len(parts) { + case 0: + return + case 1: + if arr, ok := current[parts[0]].([]any); ok { + for i, id := range arr { + arr[i], _ = primitive.ObjectIDFromHex(id.(string)) + } + } + + return + default: + if next, ok := current[parts[0]].(map[string]any); ok { + convertNestedField(next, parts[1:]) + } + } +} diff --git a/api/store/mongo/system.go b/api/store/mongo/system.go new file mode 100644 index 00000000000..614f4ad4b4c --- /dev/null +++ b/api/store/mongo/system.go @@ -0,0 +1,55 @@ +package mongo + +import ( + "context" + "time" + + "github.com/shellhub-io/shellhub/pkg/cache" + "github.com/shellhub-io/shellhub/pkg/models" + log "github.com/sirupsen/logrus" + 
"go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo/options" +) + +const ( + SystemCollection = "system" + SystemCacheTTL = 60 * time.Minute +) + +func (s *Store) SystemGet(ctx context.Context) (*models.System, error) { + if system, err := cache.Get[models.System](ctx, s.cache, SystemCollection); err == nil { + log.WithField("system", system).Warn("using system from cache") + + return system, nil + } + + result := s.db.Collection(SystemCollection).FindOne(ctx, bson.M{}) + if result.Err() != nil { + return nil, FromMongoError(result.Err()) + } + + var system *models.System + if err := result.Decode(&system); err != nil { + return nil, FromMongoError(err) + } + + if err := s.cache.Set(ctx, SystemCollection, system, SystemCacheTTL); err != nil { + log.WithField("system", system).Warn("failed to set the system data on cache") + } + + return system, nil +} + +func (s *Store) SystemSet(ctx context.Context, system *models.System) error { + upsert := true + _, err := s.db.Collection(SystemCollection).UpdateOne(ctx, bson.M{}, bson.M{"$set": system}, &options.UpdateOptions{Upsert: &upsert}) + if err != nil { + return FromMongoError(err) + } + + if err := s.cache.Delete(ctx, SystemCollection); err != nil { + log.WithField("system", system).Warn("failed to delete system from cache") + } + + return nil +} diff --git a/api/store/mongo/tags.go b/api/store/mongo/tags.go index 7aa805b2a9b..3a7ab5c1772 100644 --- a/api/store/mongo/tags.go +++ b/api/store/mongo/tags.go @@ -2,107 +2,274 @@ package mongo import ( "context" + "errors" - mongodriver "go.mongodb.org/mongo-driver/mongo" + "github.com/shellhub-io/shellhub/api/store" + "github.com/shellhub-io/shellhub/pkg/clock" + "github.com/shellhub-io/shellhub/pkg/models" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" ) -func (s *Store) TagsGet(ctx context.Context, tenant string) ([]string, int, 
error) { - session, err := s.db.Client().StartSession() +func (s *Store) TagCreate(ctx context.Context, tag *models.Tag) (string, error) { + id := primitive.NewObjectID() + upsert := bson.M{ + "$setOnInsert": bson.M{"_id": id}, + "$set": bson.M{ + "name": tag.Name, + "tenant_id": tag.TenantID, + "created_at": clock.Now(), + "updated_at": clock.Now(), + }, + } + + _, err := s.db. + Collection("tags"). + UpdateOne(ctx, bson.M{"tenant_id": tag.TenantID, "name": tag.Name}, upsert, options.Update().SetUpsert(true)) if err != nil { - return nil, 0, err + return "", FromMongoError(err) } - defer session.EndSession(ctx) - tags, err := session.WithTransaction(ctx, func(sessCtx mongodriver.SessionContext) (interface{}, error) { - deviceTags, _, err := s.DeviceGetTags(sessCtx, tenant) - if err != nil { - return nil, err + return id.Hex(), nil +} + +func (s *Store) TagConflicts(ctx context.Context, tenantID string, target *models.TagConflicts) ([]string, bool, error) { + pipeline := []bson.M{ + { + "$match": bson.M{ + "tenant_id": tenantID, + "$or": []bson.M{{"name": target.Name}}, + }, + }, + } + + cursor, err := s.db.Collection("tags").Aggregate(ctx, pipeline) + if err != nil { + return nil, false, FromMongoError(err) + } + defer cursor.Close(ctx) + + tag := new(models.Tag) + conflicts := make([]string, 0) + + for cursor.Next(ctx) { + if err := cursor.Decode(&tag); err != nil { + return nil, false, FromMongoError(err) + } + + if tag.Name == target.Name { + conflicts = append(conflicts, "name") + } + } + + return conflicts, len(conflicts) > 0, nil +} + +func (s *Store) TagList(ctx context.Context, opts ...store.QueryOption) ([]models.Tag, int, error) { + query := []bson.M{} + for _, opt := range opts { + if err := opt(context.WithValue(ctx, "query", &query)); err != nil { + return nil, 0, err + } + } + + count, err := CountAllMatchingDocuments(ctx, s.db.Collection("tags"), query) + if err != nil { + return nil, 0, FromMongoError(err) + } + + if count == 0 { + return 
[]models.Tag{}, 0, nil + } + + cursor, err := s.db.Collection("tags").Aggregate(ctx, query) + if err != nil { + return []models.Tag{}, 0, err + } + defer cursor.Close(ctx) + + tags := make([]models.Tag, 0) + for cursor.Next(ctx) { + tag := new(models.Tag) + if err := cursor.Decode(tag); err != nil { + return []models.Tag{}, 0, err } - keyTags, _, err := s.PublicKeyGetTags(sessCtx, tenant) + tags = append(tags, *tag) + } + + return tags, count, err +} + +func (s *Store) TagResolve(ctx context.Context, resolver store.TagResolver, value string, opts ...store.QueryOption) (*models.Tag, error) { + query := []bson.M{} + switch resolver { + case store.TagIDResolver: + objID, err := primitive.ObjectIDFromHex(value) if err != nil { return nil, err } - ruleTags, _, err := s.FirewallRuleGetTags(sessCtx, tenant) - if err != nil { + query = append(query, bson.M{"$match": bson.M{"_id": objID}}) + case store.TagNameResolver: + query = append(query, bson.M{"$match": bson.M{"name": value}}) + } + + for _, opt := range opts { + if err := opt(context.WithValue(ctx, "query", &query)); err != nil { return nil, err } + } + + cursor, err := s.db.Collection("tags").Aggregate(ctx, query) + if err != nil { + return nil, FromMongoError(err) + } - tags := []string{} - tags = append(tags, deviceTags...) - tags = append(tags, keyTags...) - tags = append(tags, ruleTags...) 
+ defer cursor.Close(ctx) + cursor.Next(ctx) - return removeDuplicate[string](tags), nil - }) + tag := new(models.Tag) + if err := cursor.Decode(&tag); err != nil { + return nil, FromMongoError(err) + } + + return tag, nil +} + +func (s *Store) TagUpdate(ctx context.Context, tag *models.Tag) error { + bsonBytes, err := bson.Marshal(tag) if err != nil { - return nil, 0, FromMongoError(err) + return FromMongoError(err) + } + + doc := make(bson.M) + if err := bson.Unmarshal(bsonBytes, &doc); err != nil { + return FromMongoError(err) + } + + objID, _ := primitive.ObjectIDFromHex(tag.ID) + doc["_id"] = objID + + filter := bson.M{"_id": objID} + r, err := s.db.Collection("tags").UpdateOne(ctx, filter, bson.M{"$set": doc}) + if err != nil { + return FromMongoError(err) } - return tags.([]string), len(tags.([]string)), nil + if r.MatchedCount < 1 { + return store.ErrNoDocuments + } + + return nil } -func (s *Store) TagsRename(ctx context.Context, tenantID string, oldTag string, newTag string) (int64, error) { - session, err := s.db.Client().StartSession() +func (s *Store) TagPushToTarget(ctx context.Context, id string, target store.TagTarget, targetID string) error { + tag, err := s.TagResolve(ctx, store.TagIDResolver, id) if err != nil { - return int64(0), FromMongoError(err) + return err } - defer session.EndSession(ctx) - count, err := session.WithTransaction(ctx, func(sessCtx mongodriver.SessionContext) (interface{}, error) { - devCount, err := s.DeviceBulkRenameTag(sessCtx, tenantID, oldTag, newTag) - if err != nil { - return int64(0), err - } + collection, filter, attribute, err := getTargetCollectionMetadata(target) + if err != nil { + return err + } + + tagID, _ := primitive.ObjectIDFromHex(tag.ID) + res, err := s.db. + Collection(collection). 
+ UpdateOne(ctx, bson.M{filter: targetID}, bson.M{"$addToSet": bson.M{attribute: tagID}}) + + if res.MatchedCount < 1 { + return store.ErrNoDocuments + } - keyCount, err := s.PublicKeyBulkRenameTag(sessCtx, tenantID, oldTag, newTag) + return FromMongoError(err) +} + +func (s *Store) TagPullFromTarget(ctx context.Context, id string, target store.TagTarget, targetIDs ...string) error { + tag, err := s.TagResolve(ctx, store.TagIDResolver, id) + if err != nil { + return err + } + + collection, filter, attribute, err := getTargetCollectionMetadata(target) + if err != nil { + return err + } + + tagID, _ := primitive.ObjectIDFromHex(tag.ID) + if len(targetIDs) > 0 { + res, err := s.db. + Collection(collection). + UpdateMany(ctx, bson.M{"tenant_id": tag.TenantID, filter: bson.M{"$in": targetIDs}}, bson.M{"$pull": bson.M{attribute: tagID}}) if err != nil { - return int64(0), err + return FromMongoError(err) } - rulCount, err := s.FirewallRuleBulkRenameTag(sessCtx, tenantID, oldTag, newTag) - if err != nil { - return int64(0), err + if res.MatchedCount < 1 { + return store.ErrNoDocuments } - return devCount + keyCount + rulCount, nil - }) - if err != nil { - return int64(0), FromMongoError(err) - } + return nil + } else { // nolint:revive + _, err = s.db.Collection(collection).UpdateMany(ctx, bson.M{"tenant_id": tag.TenantID}, bson.M{"$pull": bson.M{"tags": tagID}}) - return count.(int64), nil + return FromMongoError(err) + } } -func (s *Store) TagsDelete(ctx context.Context, tenantID string, tag string) (int64, error) { +func (s *Store) TagDelete(ctx context.Context, tag *models.Tag) error { session, err := s.db.Client().StartSession() if err != nil { - return int64(0), FromMongoError(err) + return err } defer session.EndSession(ctx) - count, err := session.WithTransaction(ctx, func(sessCtx mongodriver.SessionContext) (interface{}, error) { - devCount, err := s.DeviceBulkDeleteTag(sessCtx, tenantID, tag) + sessionCallback := func(sessCtx mongo.SessionContext) (any, error) { 
+ objID, err := primitive.ObjectIDFromHex(tag.ID) if err != nil { - return int64(0), err + return nil, FromMongoError(err) } - keyCount, err := s.PublicKeyBulkDeleteTag(sessCtx, tenantID, tag) + r, err := s.db.Collection("tags").DeleteOne(sessCtx, bson.M{"_id": objID}) if err != nil { - return int64(0), err + return nil, FromMongoError(err) } - rulCount, err := s.FirewallRuleBulkDeleteTag(sessCtx, tenantID, tag) - if err != nil { - return int64(0), err + if r.DeletedCount < 1 { + return nil, store.ErrNoDocuments } - return devCount + keyCount + rulCount, nil - }) - if err != nil { - return int64(0), FromMongoError(err) + if _, err := s.db.Collection("devices").UpdateMany(sessCtx, bson.M{"tenant_id": tag.TenantID}, bson.M{"$pull": bson.M{"tag_ids": objID}}); err != nil { + return nil, FromMongoError(err) + } + + for _, c := range []string{"public_keys", "firewall_rules"} { + if _, err := s.db.Collection(c).UpdateMany(sessCtx, bson.M{"tenant_id": tag.TenantID}, bson.M{"$pull": bson.M{"filters.tag_ids": objID}}); err != nil { + return nil, FromMongoError(err) + } + } + + return nil, nil } - return count.(int64), nil + _, err = session.WithTransaction(ctx, sessionCallback) + + return err +} + +func getTargetCollectionMetadata(target store.TagTarget) (string, string, string, error) { + switch target { + case store.TagTargetDevice: + return "devices", "uid", "tag_ids", nil + case store.TagTargetPublicKey: + return "public_keys", "fingerprint", "filter.tag_ids", nil + case store.TagTargetFirewallRule: + return "firewall_rules", "_id", "filter.tag_ids", nil + default: + return "", "", "", errors.New("invalid tag target") + } } diff --git a/api/store/mongo/tags_test.go b/api/store/mongo/tags_test.go index f1fc8979055..e169ca411ad 100644 --- a/api/store/mongo/tags_test.go +++ b/api/store/mongo/tags_test.go @@ -1,150 +1,591 @@ -package mongo +package mongo_test import ( "context" + "errors" + "fmt" + "slices" "sort" "testing" + "time" - 
"github.com/shellhub-io/shellhub/api/pkg/dbtest" - "github.com/shellhub-io/shellhub/api/pkg/fixtures" - "github.com/shellhub-io/shellhub/pkg/cache" - "github.com/stretchr/testify/assert" + "github.com/shellhub-io/shellhub/api/store" + "github.com/shellhub-io/shellhub/pkg/clock" + clockmocks "github.com/shellhub-io/shellhub/pkg/clock/mocks" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/stretchr/testify/require" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" ) -func TestTagsGet(t *testing.T) { +func TestStore_TagCreate(t *testing.T) { + now := time.Now() + + clockMock := new(clockmocks.Clock) + clockMock.On("Now").Return(now) + clock.DefaultBackend = clockMock + + cases := []struct { + description string + tag *models.Tag + expected error + }{ + { + description: "succeeds when tag data is valid", + tag: &models.Tag{Name: "staging", TenantID: "00000000-0000-4000-0000-000000000000"}, + expected: nil, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + ctx := context.Background() + + insertedID, err := s.TagCreate(ctx, tc.tag) + require.Equal(tt, tc.expected, err) + + if err == nil { + objID, _ := primitive.ObjectIDFromHex(insertedID) + + tag := make(map[string]interface{}) + require.NoError(tt, db.Collection("tags").FindOne(ctx, bson.M{"_id": objID}).Decode(tag)) + + require.Equal( + tt, + map[string]interface{}{ + "_id": objID, + "created_at": primitive.NewDateTimeFromTime(now), + "updated_at": primitive.NewDateTimeFromTime(now), + "name": "staging", + "tenant_id": "00000000-0000-4000-0000-000000000000", + }, + tag, + ) + } + }) + } +} + +func TestStore_TagConflicts(t *testing.T) { type Expected struct { - tags []string - len int - err error + conflicts []string + has bool + err error } cases := []struct { description string - tenant string + tenantID string + target *models.TagConflicts fixtures []string expected Expected }{ { - description: "succeeds when tag is found", - tenant: 
"00000000-0000-4000-0000-000000000000", - fixtures: []string{fixtures.FixturePublicKeys, fixtures.FixtureFirewallRules, fixtures.FixtureDevices}, - expected: Expected{ - tags: []string{"tag-1"}, - len: 1, - err: nil, - }, + description: "no conflicts when target is empty", + tenantID: "00000000-0000-4000-0000-000000000000", + target: &models.TagConflicts{}, + fixtures: []string{fixtureTags}, + expected: Expected{[]string{}, false, nil}, + }, + { + description: "no conflicts with non existing name", + tenantID: "00000000-0000-4000-0000-000000000000", + target: &models.TagConflicts{Name: "nonexistent"}, + fixtures: []string{fixtureTags}, + expected: Expected{[]string{}, false, nil}, + }, + { + description: "no conflicts when namespace is different", + tenantID: "00000000-0000-4001-0000-000000000000", + target: &models.TagConflicts{Name: "production"}, + fixtures: []string{fixtureTags}, + expected: Expected{[]string{}, false, nil}, + }, + { + description: "conflict detected with existing name", + tenantID: "00000000-0000-4000-0000-000000000000", + target: &models.TagConflicts{Name: "production"}, + fixtures: []string{fixtureTags}, + expected: Expected{[]string{"name"}, true, nil}, }, } - db := dbtest.DBServer{} - defer db.Stop() + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + ctx := context.Background() + + require.NoError(tt, srv.Apply(tc.fixtures...)) + tt.Cleanup(func() { + require.NoError(tt, srv.Reset()) + }) + + conflicts, has, err := s.TagConflicts(ctx, tc.tenantID, tc.target) + require.Equal(tt, tc.expected, Expected{conflicts, has, err}) + }) + } +} + +func TestStore_TagList(t *testing.T) { + type Expected struct { + tags []models.Tag + count int + err error + } + + cases := []struct { + description string + fixtures []string + options []store.QueryOption + expected Expected + }{ + { + description: "succeeds when no filters applied", + fixtures: []string{fixtureTags}, + options: []store.QueryOption{}, + expected: Expected{ + tags: 
[]models.Tag{ + { + ID: "6791d3c2a62aafaefe821ab3", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "owners", + TenantID: "00000000-0000-4001-0000-000000000000", + }, + { + ID: "6791d3ae04ba86e6d7a0514d", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "production", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + { + ID: "6791d3be5a201d874c4c2885", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "development", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + }, + count: 3, + err: nil, + }, + }, + { + description: "succeeds when tenant filter applied", + fixtures: []string{fixtureTags}, + options: []store.QueryOption{ + func(ctx context.Context) error { + query := ctx.Value("query").(*[]bson.M) + *query = append(*query, bson.M{ + "$match": bson.M{ + "tenant_id": "00000000-0000-4000-0000-000000000000", + }, + }) - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") + return nil + }, + }, + expected: Expected{ + tags: []models.Tag{ + { + ID: "6791d3ae04ba86e6d7a0514d", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "production", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + { + ID: "6791d3be5a201d874c4c2885", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "development", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + }, + count: 2, + err: nil, + }, + }, + } // Due to the non-deterministic order of applying fixtures when dealing with multiple datasets, // we ensure that both the expected and result arrays are correctly sorted. 
- sort := func(tags []string) { - sort.Slice(tags, func(i, j int) bool { - return tags[i] < tags[j] + sort := func(ns []models.Tag) { + sort.Slice(ns, func(i, j int) bool { + return ns[i].Name < ns[j].Name }) } for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + t.Run(tc.description, func(tt *testing.T) { + ctx := context.Background() + + require.NoError(tt, srv.Apply(tc.fixtures...)) + tt.Cleanup(func() { + require.NoError(tt, srv.Reset()) + }) + + tags, count, err := s.TagList(ctx, tc.options...) - tags, count, err := mongostore.TagsGet(context.TODO(), tc.tenant) sort(tc.expected.tags) sort(tags) - assert.Equal(t, tc.expected, Expected{tags: tags, len: count, err: err}) + + require.Equal(tt, tc.expected, Expected{tags: tags, count: count, err: err}) }) } } -func TestTagsRename(t *testing.T) { +func TestStore_TagResolve(t *testing.T) { type Expected struct { - count int64 - err error + tag *models.Tag + err error } cases := []struct { description string - tenant string - oldTag string - newTag string + resolver store.TagResolver + value string + options []store.QueryOption fixtures []string expected Expected }{ { - description: "succeeds when tag is found", - tenant: "00000000-0000-4000-0000-000000000000", - oldTag: "tag-1", - newTag: "edited-tag", - fixtures: []string{fixtures.FixturePublicKeys, fixtures.FixtureFirewallRules, fixtures.FixtureDevices}, + description: "fails when invalid ObjectID format", + resolver: store.TagIDResolver, + value: "invalid-id", + options: []store.QueryOption{}, + fixtures: []string{fixtureTags}, expected: Expected{ - count: 6, - err: nil, + tag: nil, + err: primitive.ErrInvalidHex, + }, + }, + { + description: "fails when tag not found by ID", + resolver: store.TagIDResolver, + value: "000000000000000000000000", + options: []store.QueryOption{}, + fixtures: []string{fixtureTags}, + expected: Expected{ + tag: nil, 
+ err: store.ErrNoDocuments, + }, + }, + { + description: "succeeds resolving tag by ID", + resolver: store.TagIDResolver, + value: "6791d3ae04ba86e6d7a0514d", + options: []store.QueryOption{}, + fixtures: []string{fixtureTags}, + expected: Expected{ + tag: &models.Tag{ + ID: "6791d3ae04ba86e6d7a0514d", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "production", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + err: nil, }, }, + { + description: "fails when tag not found by name", + resolver: store.TagNameResolver, + value: "nonexistent", + options: []store.QueryOption{}, + fixtures: []string{fixtureTags}, + expected: Expected{ + tag: nil, + err: store.ErrNoDocuments, + }, + }, + { + description: "succeeds resolving tag by name with tenant filter", + resolver: store.TagNameResolver, + value: "production", + options: []store.QueryOption{ + func(ctx context.Context) error { + query := ctx.Value("query").(*[]bson.M) + *query = append(*query, bson.M{ + "$match": bson.M{ + "tenant_id": "00000000-0000-4000-0000-000000000000", + }, + }) + + return nil + }, + }, + fixtures: []string{fixtureTags}, + expected: Expected{ + tag: &models.Tag{ + ID: "6791d3ae04ba86e6d7a0514d", + CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + Name: "production", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + err: nil, + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + ctx := context.Background() + + require.NoError(tt, srv.Apply(tc.fixtures...)) + tt.Cleanup(func() { + require.NoError(tt, srv.Reset()) + }) + + tag, err := s.TagResolve(ctx, tc.resolver, tc.value, tc.options...) 
+ require.Equal(tt, tc.expected, Expected{tag: tag, err: err}) + }) } +} + +func TestStore_TagUpdate(t *testing.T) { + cases := []struct { + description string + tag *models.Tag + fixtures []string + expected error + assertChanges func(context.Context) error + }{ + { + description: "fails when tag is not found due to id", + tag: &models.Tag{ + ID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "edited-tag", + }, + fixtures: []string{fixtureTags}, + expected: store.ErrNoDocuments, + assertChanges: nil, + }, + { + description: "succeeds when tag is found", + tag: &models.Tag{ + ID: "6791d3ae04ba86e6d7a0514d", + TenantID: "00000000-0000-4000-0000-000000000000", + Name: "edited-tag", + }, + fixtures: []string{fixtureTags}, + expected: nil, + assertChanges: func(ctx context.Context) error { + tag := new(models.Tag) + objID, _ := primitive.ObjectIDFromHex("6791d3ae04ba86e6d7a0514d") + err := db.Collection("tags").FindOne(ctx, bson.M{"_id": objID}).Decode(tag) + if err != nil { + return err + } - db := dbtest.DBServer{} - defer db.Stop() + if tag.Name != "edited-tag" { + return errors.New("tag name was not updated") + } - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") + return nil + }, + }, + } for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + t.Run(tc.description, func(tt *testing.T) { + ctx := context.Background() - count, err := mongostore.TagsRename(context.TODO(), tc.tenant, tc.oldTag, tc.newTag) - assert.Equal(t, tc.expected, Expected{count, err}) + require.NoError(tt, srv.Apply(tc.fixtures...)) + tt.Cleanup(func() { + require.NoError(tt, srv.Reset()) + }) + + err := s.TagUpdate(ctx, tc.tag) + require.Equal(tt, tc.expected, err) + + if err == nil && tc.assertChanges != nil { + require.NoError(tt, tc.assertChanges(ctx)) + } }) } } -func 
TestTagsDelete(t *testing.T) { - type Expected struct { - count int64 - err error +func TestStore_TagPushToTarget(t *testing.T) { + cases := []struct { + description string + id string + target store.TagTarget + targetID string + fixtures []string + expected error + }{ + { + description: "fails when tag does not exist", + id: "000000000000000000000000", + target: store.TagTargetDevice, + targetID: "5300530e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809f", + fixtures: []string{fixtureDevices}, + expected: store.ErrNoDocuments, + }, + { + description: "fails when device does not exist", + id: "6791d3be5a201d874c4c2885", + target: store.TagTargetDevice, + targetID: "nonexistent", + fixtures: []string{fixtureTags}, + expected: store.ErrNoDocuments, + }, + { + description: "succeeds to push a tag to device", + id: "6791d3be5a201d874c4c2885", + target: store.TagTargetDevice, + targetID: "5300530e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809f", + fixtures: []string{fixtureTags, fixtureDevices}, + expected: nil, + }, } + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + ctx := context.Background() + + require.NoError(tt, srv.Apply(tc.fixtures...)) + tt.Cleanup(func() { + require.NoError(tt, srv.Reset()) + }) + + err := s.TagPushToTarget(ctx, tc.id, tc.target, tc.targetID) + require.Equal(tt, tc.expected, err) + + if err != nil { + return + } + + var device struct { + TagIDs []string `bson:"tag_ids"` + } + + require.NoError(tt, db.Collection("devices").FindOne(ctx, bson.M{"uid": tc.targetID}).Decode(&device)) + fmt.Printf("tag_ids: %+v\n", device.TagIDs) + require.True(tt, slices.Contains(device.TagIDs, tc.id)) + }) + } +} + +func TestTagPullFromTarget(t *testing.T) { cases := []struct { description string - tenant string - tag string + id string + target store.TagTarget + targetIDs []string fixtures []string - expected Expected + expected error + }{ + { + description: "fails when tag does not exist", + id: 
"000000000000000000000000", + target: store.TagTargetDevice, + targetIDs: []string{"5300530e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809f"}, + fixtures: []string{fixtureDevices}, + expected: store.ErrNoDocuments, + }, + { + description: "fails when device does not exist", + id: "6791d3ae04ba86e6d7a0514d", + target: store.TagTargetDevice, + targetIDs: []string{"nonexistent"}, + fixtures: []string{fixtureTags}, + expected: store.ErrNoDocuments, + }, + { + description: "succeeds to pull a tag from device", + id: "6791d3ae04ba86e6d7a0514d", + target: store.TagTargetDevice, + targetIDs: []string{"5300530e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809f"}, + fixtures: []string{fixtureTags, fixtureDevices}, + expected: nil, + }, + { + description: "succeeds to pull a tag from all targets when no specific targets provided", + id: "6791d3ae04ba86e6d7a0514d", + target: store.TagTargetDevice, + targetIDs: []string{}, + fixtures: []string{fixtureTags, fixtureDevices}, + expected: nil, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + ctx := context.Background() + + require.NoError(tt, srv.Apply(tc.fixtures...)) + tt.Cleanup(func() { + require.NoError(tt, srv.Reset()) + }) + + err := s.TagPullFromTarget(ctx, tc.id, tc.target, tc.targetIDs...) 
+ require.Equal(tt, tc.expected, err) + + if err != nil || len(tc.targetIDs) <= 0 { + return + } + + var device struct { + TagIDs []string `bson:"tag_ids"` + } + + require.NoError(tt, db.Collection("devices").FindOne(ctx, bson.M{"uid": tc.targetIDs[0]}).Decode(&device)) + require.False(tt, slices.Contains(device.TagIDs, tc.id)) + }) + } +} + +func TestStore_TagDelete(t *testing.T) { + cases := []struct { + description string + tag *models.Tag + fixtures []string + expected error }{ + { + description: "fails when tag is not found due to id", + tag: &models.Tag{ + ID: "000000000000000000000000", + TenantID: "00000000-0000-4000-0000-000000000000", + }, + fixtures: []string{fixtureTags}, + expected: store.ErrNoDocuments, + }, { description: "succeeds when tag is found", - tenant: "00000000-0000-4000-0000-000000000000", - tag: "tag-1", - fixtures: []string{fixtures.FixturePublicKeys, fixtures.FixtureFirewallRules, fixtures.FixtureDevices}, - expected: Expected{ - count: 6, - err: nil, + tag: &models.Tag{ + ID: "6791d3ae04ba86e6d7a0514d", + TenantID: "00000000-0000-4000-0000-000000000000", }, + fixtures: []string{fixtureTags}, + expected: nil, }, } - db := dbtest.DBServer{} - defer db.Stop() + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + ctx := context.Background() + + require.NoError(tt, srv.Apply(tc.fixtures...)) + tt.Cleanup(func() { + require.NoError(tt, srv.Reset()) + }) - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") + err := s.TagDelete(ctx, tc.tag) + require.Equal(tt, tc.expected, err) - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + if err != nil { + return + } - count, err := mongostore.TagsDelete(context.TODO(), tc.tenant, tc.tag) - assert.Equal(t, tc.expected, Expected{count, err}) + objID, _ := primitive.ObjectIDFromHex(tc.tag.ID) + count, err 
:= db.Collection("tags").CountDocuments(ctx, bson.M{"_id": objID}) + require.NoError(tt, err) + require.Equal(tt, int64(0), count) }) } } diff --git a/api/store/mongo/transaction.go b/api/store/mongo/transaction.go new file mode 100644 index 00000000000..6179120bf87 --- /dev/null +++ b/api/store/mongo/transaction.go @@ -0,0 +1,27 @@ +package mongo + +import ( + "context" + + "github.com/shellhub-io/shellhub/api/store" + "go.mongodb.org/mongo-driver/mongo" +) + +func (s *Store) WithTransaction(ctx context.Context, cb store.TransactionCb) error { + session, err := s.db.Client().StartSession() + if err != nil { + return store.ErrStartTransactionFailed + } + defer session.EndSession(ctx) + + // The [session.WithTransaction] function expects a callback that returns an [interface{}] and an error. + // To meet this requirement, we need to wrap our cb so that it always returns nil as the [interface{}], + // along with the error from our callback function. + fn := func(ctx mongo.SessionContext) (interface{}, error) { + return nil, cb(ctx) + } + + _, err = session.WithTransaction(ctx, fn) + + return err +} diff --git a/api/store/mongo/transaction_test.go b/api/store/mongo/transaction_test.go new file mode 100644 index 00000000000..e9816c91fee --- /dev/null +++ b/api/store/mongo/transaction_test.go @@ -0,0 +1,65 @@ +package mongo_test + +import ( + "context" + "errors" + "testing" + + "github.com/shellhub-io/shellhub/api/store" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "go.mongodb.org/mongo-driver/bson" +) + +func TestWithTransaction(t *testing.T) { + cases := []struct { + description string + callback store.TransactionCb + expected error + }{ + { + description: "should abort changes", + callback: func(ctx context.Context) error { + if _, err := db.Collection("users").InsertOne(ctx, bson.M{"_id": 1, "name": "John Doe"}); err != nil { + return err + } + + return errors.New("error") + }, + expected: errors.New("error"), + }, + { + 
description: "should commit changes", + callback: func(ctx context.Context) error { + if _, err := db.Collection("users").InsertOne(ctx, bson.M{"_id": 1, "name": "John Doe"}); err != nil { + return err + } + + return nil + }, + expected: nil, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + ctx := context.Background() + + tt.Cleanup(func() { + assert.NoError(tt, srv.Reset()) + }) + + if err := s.WithTransaction(ctx, tc.callback); err != nil { + require.Equal(tt, err, tc.expected) + target := make(map[string]interface{}) + require.Error(tt, db.Collection("users").FindOne(ctx, bson.M{"_id": 1}).Decode(&target)) + _, ok := target["name"] + require.Equal(tt, false, ok) + } else { + target := make(map[string]interface{}) + require.NoError(tt, db.Collection("users").FindOne(ctx, bson.M{"_id": 1}).Decode(&target)) + require.Equal(tt, "John Doe", target["name"]) + } + }) + } +} diff --git a/api/store/mongo/tunnel.go b/api/store/mongo/tunnel.go new file mode 100644 index 00000000000..b9f26c6a44e --- /dev/null +++ b/api/store/mongo/tunnel.go @@ -0,0 +1,16 @@ +package mongo + +import ( + "context" + + "go.mongodb.org/mongo-driver/bson" +) + +func (s *Store) TunnelUpdateDeviceUID(ctx context.Context, tenantID, oldUID, newUID string) error { + _, err := s.db.Collection("tunnels").UpdateMany(ctx, bson.M{"namespace": tenantID, "device": oldUID}, bson.M{"$set": bson.M{"device": newUID}}) + if err != nil { + return FromMongoError(err) + } + + return nil +} diff --git a/api/store/mongo/user-invitations.go b/api/store/mongo/user-invitations.go new file mode 100644 index 00000000000..ecfb8051d3b --- /dev/null +++ b/api/store/mongo/user-invitations.go @@ -0,0 +1,43 @@ +package mongo + +import ( + "context" + "strings" + + "github.com/shellhub-io/shellhub/pkg/clock" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" + "go.mongodb.org/mongo-driver/mongo/options" +) + +func (s *Store) UserInvitationsUpsert(ctx 
context.Context, email string) (string, error) { + now := clock.Now() + + r := s.db.Collection("user_invitations").FindOneAndUpdate( + ctx, + bson.M{ + "email": email, + }, + bson.M{ + "$setOnInsert": bson.M{ + "created_at": now, + "status": "pending", + }, + "$set": bson.M{ + "email": strings.ToLower(email), + "updated_at": now, + }, + "$inc": bson.M{ + "invitations": 1, + }, + }, + options.FindOneAndUpdate().SetUpsert(true).SetReturnDocument(options.After), + ) + + userInvitation := make(bson.M) + if err := r.Decode(&userInvitation); err != nil { + return "", FromMongoError(err) + } + + return userInvitation["_id"].(primitive.ObjectID).Hex(), nil +} diff --git a/api/store/mongo/user-invitations_test.go b/api/store/mongo/user-invitations_test.go new file mode 100644 index 00000000000..3d5203bb2d2 --- /dev/null +++ b/api/store/mongo/user-invitations_test.go @@ -0,0 +1,79 @@ +package mongo_test + +import ( + "context" + "testing" + "time" + + "github.com/shellhub-io/shellhub/pkg/clock" + clockmock "github.com/shellhub-io/shellhub/pkg/clock/mocks" + "github.com/stretchr/testify/require" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +func TestStore_UserInvitationsUpsert(t *testing.T) { + mockClock := new(clockmock.Clock) + clock.DefaultBackend = mockClock + + now := time.Now() + mockClock.On("Now").Return(now) + + cases := []struct { + description string + email string + fixtures []string + expected map[string]any + }{ + { + description: "succeeds creating new invitation", + email: "john.doe@test.com", + fixtures: []string{}, + expected: map[string]any{ + "email": "john.doe@test.com", + "created_at": primitive.NewDateTimeFromTime(now), + "updated_at": primitive.NewDateTimeFromTime(now), + "invitations": int32(1), + }, + }, + { + description: "succeeds updating existing invitation", + email: "jane.doe@test.com", + fixtures: []string{fixtureUserInvitations}, + expected: map[string]any{ + "email": "jane.doe@test.com", + 
"updated_at": primitive.NewDateTimeFromTime(now), + "invitations": float64(3), + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + ctx := context.Background() + + require.NoError(tt, srv.Apply(tc.fixtures...)) + tt.Cleanup(func() { + require.NoError(tt, srv.Reset()) + }) + + upsertedID, err := s.UserInvitationsUpsert(ctx, tc.email) + require.NoError(tt, err) + require.NotEmpty(tt, upsertedID) + + objID, _ := primitive.ObjectIDFromHex(upsertedID) + + tmpInvitation := make(map[string]any) + require.NoError(tt, db.Collection("user_invitations").FindOne(ctx, bson.M{"_id": objID}).Decode(&tmpInvitation)) + + require.Equal(tt, objID, tmpInvitation["_id"]) + for field, expectedValue := range tc.expected { + require.Equal(tt, expectedValue, tmpInvitation[field]) + } + + if tc.description == "succeeds updating existing invitation" { + require.NotEqual(tt, primitive.NewDateTimeFromTime(now), tmpInvitation["created_at"]) + } + }) + } +} diff --git a/api/store/mongo/user.go b/api/store/mongo/user.go index 0d0fedce9d3..35482a3c23f 100644 --- a/api/store/mongo/user.go +++ b/api/store/mongo/user.go @@ -2,26 +2,20 @@ package mongo import ( "context" + "time" - "github.com/shellhub-io/shellhub/api/pkg/gateway" "github.com/shellhub-io/shellhub/api/store" - "github.com/shellhub-io/shellhub/api/store/mongo/queries" - "github.com/shellhub-io/shellhub/pkg/api/paginator" "github.com/shellhub-io/shellhub/pkg/models" "go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/bson/primitive" - "go.mongodb.org/mongo-driver/mongo/options" ) -func (s *Store) UserList(ctx context.Context, pagination paginator.Query, filters []models.Filter) ([]models.User, int, error) { +func (s *Store) UserList(ctx context.Context, opts ...store.QueryOption) ([]models.User, int, error) { query := []bson.M{} - - if tenant := gateway.TenantFromContext(ctx); tenant != nil { - query = append(query, bson.M{ - "$match": bson.M{ - "tenant_id": tenant.ID, - }, - }) + for 
_, opt := range opts { + if err := opt(context.WithValue(ctx, "query", &query)); err != nil { + return nil, 0, err + } } query = append(query, []bson.M{ @@ -45,26 +39,11 @@ func (s *Store) UserList(ctx context.Context, pagination paginator.Query, filter }, }...) - queryMatch, err := queries.BuildFilterQuery(filters) + count, err := CountAllMatchingDocuments(ctx, s.db.Collection("users"), query) if err != nil { return nil, 0, FromMongoError(err) } - if len(queryMatch) > 0 { - query = append(query, queryMatch...) - } - - queryCount := query - queryCount = append(queryCount, bson.M{"$count": "count"}) - count, err := AggregateCount(ctx, s.db.Collection("users"), queryCount) - if err != nil { - return nil, 0, FromMongoError(err) - } - - if pagination.Page > 0 && pagination.PerPage > 0 { - query = append(query, queries.BuildPaginationQuery(pagination)...) - } - users := make([]models.User, 0) cursor, err := s.db.Collection("users").Aggregate(ctx, query) if err != nil { @@ -85,271 +64,189 @@ func (s *Store) UserList(ctx context.Context, pagination paginator.Query, filter return users, count, FromMongoError(err) } -func (s *Store) UserCreate(ctx context.Context, user *models.User) error { - _, err := s.db.Collection("users").InsertOne(ctx, user) +func (s *Store) UserCreate(ctx context.Context, user *models.User) (string, error) { + user.CreatedAt = time.Now() + user.LastLogin = time.Time{} - return FromMongoError(err) -} + // In Cloud environments, there is a flow for inserting a user with a forced ID value. + // Since the struct defines the ID type as string, inserting the struct directly + // would result in a type error in the database. We need to convert the struct to + // BSON and handle the potential string _id conversion to ObjectID. 
+ bsonBytes, err := bson.Marshal(user) + if err != nil { + return "", FromMongoError(err) + } -func (s *Store) UserGetByUsername(ctx context.Context, username string) (*models.User, error) { - user := new(models.User) + doc := make(bson.M) + if err := bson.Unmarshal(bsonBytes, &doc); err != nil { + return "", FromMongoError(err) + } - if err := s.db.Collection("users").FindOne(ctx, bson.M{"username": username}).Decode(&user); err != nil { - return nil, FromMongoError(err) + if idStr, ok := doc["_id"].(string); ok && idStr != "" { + objID, _ := primitive.ObjectIDFromHex(idStr) + doc["_id"] = objID } - return user, nil + r, err := s.db.Collection("users").InsertOne(ctx, doc) + if err != nil { + return "", FromMongoError(err) + } + + return r.InsertedID.(primitive.ObjectID).Hex(), nil } -func (s *Store) UserGetByEmail(ctx context.Context, email string) (*models.User, error) { - user := new(models.User) +func (s *Store) UserResolve(ctx context.Context, resolver store.UserResolver, value string, opts ...store.QueryOption) (*models.User, error) { + matchStage := bson.M{} + switch resolver { + case store.UserIDResolver: + objID, err := primitive.ObjectIDFromHex(value) + if err != nil { + return nil, err + } - if err := s.db.Collection("users").FindOne(ctx, bson.M{"email": email}).Decode(&user); err != nil { - return nil, FromMongoError(err) + matchStage["_id"] = objID + case store.UserEmailResolver: + matchStage["email"] = value + case store.UserUsernameResolver: + matchStage["username"] = value } - return user, nil -} + query := []bson.M{{"$match": matchStage}} + for _, opt := range opts { + if err := opt(context.WithValue(ctx, "query", &query)); err != nil { + return nil, err + } + } -func (s *Store) UserGetByID(ctx context.Context, id string, ns bool) (*models.User, int, error) { - user := new(models.User) - objID, err := primitive.ObjectIDFromHex(id) + cursor, err := s.db.Collection("users").Aggregate(ctx, query) if err != nil { - return nil, 0, err + return nil, 
FromMongoError(err) } + defer cursor.Close(ctx) - if err := s.db.Collection("users").FindOne(ctx, bson.M{"_id": objID}).Decode(&user); err != nil { - return nil, 0, FromMongoError(err) - } + cursor.Next(ctx) - if !ns { - return user, 0, nil + user := new(models.User) + if err := cursor.Decode(&user); err != nil { + return nil, FromMongoError(err) } - nss := struct { - NamespacesOwned int `bson:"namespacesOwned"` - }{} + return user, nil +} - query := []bson.M{ +func (s *Store) UserConflicts(ctx context.Context, target *models.UserConflicts) ([]string, bool, error) { + pipeline := []bson.M{ { "$match": bson.M{ - "_id": objID, - }, - }, - { - "$addFields": bson.M{ - "_id": bson.M{ - "$toString": "$_id", + "$or": []bson.M{ + {"email": target.Email}, + {"username": target.Username}, }, }, }, - { - "$lookup": bson.M{ - "from": "namespaces", - "localField": "_id", - "foreignField": "owner", - "as": "ns", - }, - }, - { - "$addFields": bson.M{ - "namespacesOwned": bson.M{ - "$size": "$ns", - }, - }, - }, - { - "$project": bson.M{ - "namespacesOwned": 1, - "_id": 0, - }, - }, } - cursor, err := s.db.Collection("users").Aggregate(ctx, query) + cursor, err := s.db.Collection("users").Aggregate(ctx, pipeline) if err != nil { - return nil, 0, FromMongoError(err) + return nil, false, FromMongoError(err) } - defer cursor.Close(ctx) - if !cursor.Next(ctx) { - return nil, 0, FromMongoError(err) - } + user := new(models.UserConflicts) + conflicts := make([]string, 0) + for cursor.Next(ctx) { + if err := cursor.Decode(&user); err != nil { + return nil, false, FromMongoError(err) + } - if err = cursor.Decode(&nss); err != nil { - return nil, 0, FromMongoError(err) + if user.Username == target.Username { + conflicts = append(conflicts, "username") + } + + if user.Email == target.Email { + conflicts = append(conflicts, "email") + } } - return user, nss.NamespacesOwned, nil + return conflicts, len(conflicts) > 0, nil } -func (s *Store) UserUpdateData(ctx context.Context, id string, data 
models.User) error { - objID, err := primitive.ObjectIDFromHex(id) +func (s *Store) UserUpdate(ctx context.Context, user *models.User) error { + bsonBytes, err := bson.Marshal(user) if err != nil { return FromMongoError(err) } - user, err := s.db.Collection("users").UpdateOne(ctx, bson.M{"_id": objID}, bson.M{"$set": bson.M{"name": data.Name, "username": data.Username, "email": data.Email, "last_login": data.LastLogin}}) - if err != nil { + doc := make(bson.M) + if err := bson.Unmarshal(bsonBytes, &doc); err != nil { return FromMongoError(err) } - if user.MatchedCount == 0 { - return store.ErrNoDocuments - } - - return nil -} + objID, _ := primitive.ObjectIDFromHex(user.ID) + delete(doc, "_id") -func (s *Store) UserUpdatePassword(ctx context.Context, newPassword string, id string) error { - objID, err := primitive.ObjectIDFromHex(id) - if err != nil { - return FromMongoError(err) - } - - user, err := s.db.Collection("users").UpdateOne(ctx, bson.M{"_id": objID}, bson.M{"$set": bson.M{"password": newPassword}}) - if err != nil { - return FromMongoError(err) + // HACK: When a document is read from MongoDB with a null field, Go's BSON driver deserializes + // it as the zero value for that type (e.g., "" for strings). When we marshal this struct back + // to BSON for an update, the empty string is written instead of null. This causes issues with + // unique indexes that have a partial filter expression for string types (see migration 77), + // as multiple documents with "" would violate the uniqueness constraint, while null values + // are allowed to coexist. To preserve null values in the database, we remove these fields + // from the update document when they are empty strings. + if user.Username == "" { + delete(doc, "username") } - if user.MatchedCount < 1 { - return store.ErrNoDocuments + if user.Email == "" { + delete(doc, "email") } - return nil -} - -// UserUpdateAccountStatus sets the 'confirmed' attribute of a user to true. 
-func (s *Store) UserUpdateAccountStatus(ctx context.Context, id string) error { - objID, err := primitive.ObjectIDFromHex(id) + r, err := s.db.Collection("users").UpdateOne(ctx, bson.M{"_id": objID}, bson.M{"$set": doc}) if err != nil { return FromMongoError(err) } - user, err := s.db.Collection("users").UpdateOne(ctx, bson.M{"_id": objID}, bson.M{"$set": bson.M{"confirmed": true}}) - if err != nil { - return err - } - - if user.MatchedCount < 1 { + if r.MatchedCount < 1 { return store.ErrNoDocuments } return nil } -func (s *Store) UserUpdateFromAdmin(ctx context.Context, name string, username string, email string, password string, id string) error { - updatedFields := bson.M{} - - if name != "" { - updatedFields["name"] = name - } - if username != "" { - updatedFields["username"] = username - } - if email != "" { - updatedFields["email"] = email - } - if password != "" { - updatedFields["password"] = password - } - - if len(updatedFields) > 0 { - objID, err := primitive.ObjectIDFromHex(id) - if err != nil { - return FromMongoError(err) - } - - user, err := s.db.Collection("users").UpdateOne(ctx, bson.M{"_id": objID}, bson.M{"$set": updatedFields}) - if err != nil { - return FromMongoError(err) - } - - if user.ModifiedCount < 1 { - return store.ErrNoDocuments - } - } - - return nil -} - -func (s *Store) UserCreateToken(ctx context.Context, token *models.UserTokenRecover) error { - if _, err := primitive.ObjectIDFromHex(token.User); err != nil { - return err - } - - if _, err := s.db.Collection("recovery_tokens").InsertOne(ctx, token); err != nil { - return err - } - - return nil -} - -func (s *Store) UserGetToken(ctx context.Context, id string) (*models.UserTokenRecover, error) { - token := new(models.UserTokenRecover) - - if err := s.db.Collection("recovery_tokens").FindOne(ctx, bson.M{"user": id}).Decode(&token); err != nil { - return nil, FromMongoError(err) - } - - return token, nil -} - -func (s *Store) UserDeleteTokens(ctx context.Context, id string) error { 
- if _, err := s.db.Collection("recovery_tokens").DeleteMany(ctx, bson.M{"user": id}); err != nil { - return FromMongoError(err) - } - - return nil -} - -func (s *Store) UserDelete(ctx context.Context, id string) error { - objID, err := primitive.ObjectIDFromHex(id) - if err != nil { - return FromMongoError(err) - } - - user, err := s.db.Collection("users").DeleteOne(ctx, bson.M{"_id": objID}) +func (s *Store) UserDelete(ctx context.Context, user *models.User) error { + objID, _ := primitive.ObjectIDFromHex(user.ID) + r, err := s.db.Collection("users").DeleteOne(ctx, bson.M{"_id": objID}) if err != nil { return FromMongoError(err) } - if user.DeletedCount < 1 { + if r.DeletedCount < 1 { return store.ErrNoDocuments } return nil } -func (s *Store) UserDetachInfo(ctx context.Context, id string) (map[string][]*models.Namespace, error) { - findOptions := options.Find() - - cursor, err := s.db.Collection("namespaces").Find(ctx, bson.M{"members": bson.M{"$elemMatch": bson.M{"id": id}}}, findOptions) +func (s *Store) UserGetInfo(ctx context.Context, id string) (*models.UserInfo, error) { + cursor, err := s.db.Collection("namespaces").Find(ctx, bson.M{"members": bson.M{"$elemMatch": bson.M{"id": id}}}) if err != nil { return nil, FromMongoError(err) } defer cursor.Close(ctx) - namespacesMap := make(map[string][]*models.Namespace, 2) - ownerNamespaceList := make([]*models.Namespace, 0) - membersNamespaceList := make([]*models.Namespace, 0) + userInfo := &models.UserInfo{} for cursor.Next(ctx) { - namespace := new(models.Namespace) - if err := cursor.Decode(&namespace); err != nil { + ns := new(models.Namespace) + if err := cursor.Decode(ns); err != nil { return nil, FromMongoError(err) } - if namespace.Owner != id { - membersNamespaceList = append(membersNamespaceList, namespace) + if ns.Owner == id { + userInfo.OwnedNamespaces = append(userInfo.OwnedNamespaces, *ns) } else { - ownerNamespaceList = append(ownerNamespaceList, namespace) + userInfo.AssociatedNamespaces = 
append(userInfo.AssociatedNamespaces, *ns) } } - namespacesMap["member"] = membersNamespaceList - namespacesMap["owner"] = ownerNamespaceList - - return namespacesMap, nil + return userInfo, nil } diff --git a/api/store/mongo/user_test.go b/api/store/mongo/user_test.go index 3176680da5e..ca2d9d7c646 100644 --- a/api/store/mongo/user_test.go +++ b/api/store/mongo/user_test.go @@ -1,4 +1,4 @@ -package mongo +package mongo_test import ( "context" @@ -6,14 +6,11 @@ import ( "testing" "time" - "github.com/shellhub-io/shellhub/api/pkg/dbtest" - "github.com/shellhub-io/shellhub/api/pkg/fixtures" - "github.com/shellhub-io/shellhub/api/pkg/guard" "github.com/shellhub-io/shellhub/api/store" - "github.com/shellhub-io/shellhub/pkg/api/paginator" - "github.com/shellhub-io/shellhub/pkg/cache" + "github.com/shellhub-io/shellhub/pkg/api/query" "github.com/shellhub-io/shellhub/pkg/models" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/bson/primitive" ) @@ -27,16 +24,17 @@ func TestUserList(t *testing.T) { cases := []struct { description string - page paginator.Query - filters []models.Filter + opts []store.QueryOption fixtures []string expected Expected }{ { description: "succeeds when users are found", - page: paginator.Query{Page: -1, PerPage: -1}, - filters: nil, - fixtures: []string{fixtures.FixtureUsers}, + opts: []store.QueryOption{ + s.Options().Match(&query.Filters{}), + s.Options().Paginate(&query.Paginator{Page: -1, PerPage: -1}), + }, + fixtures: []string{fixtureUsers}, expected: Expected{ users: []models.User{ { @@ -44,15 +42,15 @@ func TestUserList(t *testing.T) { CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), LastLogin: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), EmailMarketing: true, - Confirmed: true, + Status: models.UserStatusConfirmed, UserData: models.UserData{ Name: "john doe", Username: "john_doe", Email: "john.doe@test.com", }, MaxNamespaces: 0, - 
UserPassword: models.UserPassword{ - HashedPassword: "fcf730b6d95236ecd3c9fc2d92d7b6b2bb061514961aec041d6c7a7192f592e4", + Password: models.UserPassword{ + Hash: "fcf730b6d95236ecd3c9fc2d92d7b6b2bb061514961aec041d6c7a7192f592e4", }, }, { @@ -60,15 +58,15 @@ func TestUserList(t *testing.T) { CreatedAt: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), LastLogin: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), EmailMarketing: true, - Confirmed: true, + Status: models.UserStatusConfirmed, UserData: models.UserData{ Name: "Jane Smith", Username: "jane_smith", Email: "jane.smith@test.com", }, MaxNamespaces: 3, - UserPassword: models.UserPassword{ - HashedPassword: "a0b8c29f4c8d57e542f5e81d35ebe801fd27f569f116fe670e8962d798512a1d", + Password: models.UserPassword{ + Hash: "a0b8c29f4c8d57e542f5e81d35ebe801fd27f569f116fe670e8962d798512a1d", }, }, { @@ -76,51 +74,60 @@ func TestUserList(t *testing.T) { CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), LastLogin: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), EmailMarketing: true, - Confirmed: true, + Status: models.UserStatusConfirmed, UserData: models.UserData{ Name: "Bob Johnson", Username: "bob_johnson", Email: "bob.johnson@test.com", }, MaxNamespaces: 10, - UserPassword: models.UserPassword{ - HashedPassword: "5f3b3956a1a150b73e6b27e674f27d7aeb01ab1a40c179c3e1aa6026a36655a2", + Password: models.UserPassword{ + Hash: "5f3b3956a1a150b73e6b27e674f27d7aeb01ab1a40c179c3e1aa6026a36655a2", }, }, { ID: "80fdcea1d7299c002f3a67e8", CreatedAt: time.Date(2023, 1, 4, 12, 0, 0, 0, time.UTC), EmailMarketing: false, - Confirmed: false, + Status: models.UserStatusNotConfirmed, UserData: models.UserData{ Name: "Alex Rodriguez", Username: "alex_rodriguez", Email: "alex.rodriguez@test.com", }, MaxNamespaces: 3, - UserPassword: models.UserPassword{ - HashedPassword: "c5093eb98678c7a3324825b84c6b67c1127b93786482ddbbd356e67e29b2763f", + Password: models.UserPassword{ + Hash: "c5093eb98678c7a3324825b84c6b67c1127b93786482ddbbd356e67e29b2763f", 
+ }, + }, + { + ID: "6509e169ae6144b2f56bf288", + CreatedAt: time.Date(2023, 1, 5, 12, 0, 0, 0, time.UTC), + LastLogin: time.Date(2023, 1, 5, 12, 0, 0, 0, time.UTC), + EmailMarketing: true, + Status: models.UserStatusConfirmed, + UserData: models.UserData{ + Name: "Maria Garcia", + Email: "maria.garcia@test.com", + Username: "maria_garcia", + }, + MaxNamespaces: 5, + Password: models.UserPassword{ + Hash: "c2301b2b7e872843b473d2c301e4fb2e6e9f27f2e7a1b6ad44a3b2c97f1670b3", }, }, }, - count: 4, + count: 5, err: nil, }, }, { description: "succeeds with filters", - page: paginator.Query{Page: -1, PerPage: -1}, - filters: []models.Filter{ - { - Type: "property", - Params: &models.PropertyParams{ - Name: "max_namespaces", - Operator: "gt", - Value: "3", - }, - }, + opts: []store.QueryOption{ + s.Options().Match(&query.Filters{Data: []query.Filter{{Type: "property", Params: &query.FilterProperty{Name: "max_namespaces", Operator: "gt", Value: "3"}}}}), + s.Options().Paginate(&query.Paginator{Page: -1, PerPage: -1}), }, - fixtures: []string{fixtures.FixtureUsers}, + fixtures: []string{fixtureUsers}, expected: Expected{ users: []models.User{ { @@ -128,30 +135,40 @@ func TestUserList(t *testing.T) { CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), LastLogin: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), EmailMarketing: true, - Confirmed: true, + Status: models.UserStatusConfirmed, UserData: models.UserData{ Name: "Bob Johnson", Username: "bob_johnson", Email: "bob.johnson@test.com", }, MaxNamespaces: 10, - UserPassword: models.UserPassword{ - HashedPassword: "5f3b3956a1a150b73e6b27e674f27d7aeb01ab1a40c179c3e1aa6026a36655a2", + Password: models.UserPassword{ + Hash: "5f3b3956a1a150b73e6b27e674f27d7aeb01ab1a40c179c3e1aa6026a36655a2", + }, + }, + { + ID: "6509e169ae6144b2f56bf288", + CreatedAt: time.Date(2023, 1, 5, 12, 0, 0, 0, time.UTC), + LastLogin: time.Date(2023, 1, 5, 12, 0, 0, 0, time.UTC), + EmailMarketing: true, + Status: models.UserStatusConfirmed, + UserData: 
models.UserData{ + Name: "Maria Garcia", + Email: "maria.garcia@test.com", + Username: "maria_garcia", + }, + MaxNamespaces: 5, + Password: models.UserPassword{ + Hash: "c2301b2b7e872843b473d2c301e4fb2e6e9f27f2e7a1b6ad44a3b2c97f1670b3", }, }, }, - count: 1, + count: 2, err: nil, }, }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - // Due to the non-deterministic order of applying fixtures when dealing with multiple datasets, // we ensure that both the expected and result arrays are correctly sorted. sort := func(users []models.User) { @@ -162,60 +179,24 @@ func TestUserList(t *testing.T) { for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - users, count, err := mongostore.UserList(context.TODO(), tc.page, tc.filters) - sort(tc.expected.users) - sort(users) - assert.Equal(t, tc.expected, Expected{users: users, count: count, err: err}) - }) - } -} - -func TestUserCreate(t *testing.T) { - cases := []struct { - description string - user *models.User - fixtures []string - expected error - }{ - { - description: "succeeds when data is valid", - user: &models.User{ - ID: "507f1f77bcf86cd799439011", - UserData: models.UserData{ - Name: "john doe", - Username: "john_doe", - Email: "john.doe@test.com", - }, - UserPassword: models.UserPassword{ - HashedPassword: "fcf730b6d95236ecd3c9fc2d92d7b6b2bb061514961aec041d6c7a7192f592e4", - }, - }, - fixtures: []string{}, - expected: nil, - }, - } + ctx := context.Background() - db := dbtest.DBServer{} - defer db.Stop() + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") + users, count, err := s.UserList(ctx, tc.opts...) 
- for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + sort(tc.expected.users) + sort(users) - err := mongostore.UserCreate(context.TODO(), tc.user) - assert.Equal(t, tc.expected, err) + assert.Equal(t, tc.expected, Expected{users: users, count: count, err: err}) }) } } -func TestUserGetByUsername(t *testing.T) { +func TestUserResolve(t *testing.T) { type Expected struct { user *models.User err error @@ -223,534 +204,355 @@ func TestUserGetByUsername(t *testing.T) { cases := []struct { description string - username string + resolver store.UserResolver + value string fixtures []string expected Expected }{ { - description: "fails when user is not found", - username: "nonexistent", - fixtures: []string{fixtures.FixtureUsers}, + description: "fails when invalid ObjectID format", + resolver: store.UserIDResolver, + value: "invalid-id", + fixtures: []string{fixtureUsers}, + expected: Expected{ + user: nil, + err: primitive.ErrInvalidHex, + }, + }, + { + description: "fails when user not found by ID", + resolver: store.UserIDResolver, + value: "507f1f77bcf86cd799439999", + fixtures: []string{fixtureUsers}, expected: Expected{ user: nil, err: store.ErrNoDocuments, }, }, { - description: "succeeds when user is found", - username: "john_doe", - fixtures: []string{fixtures.FixtureUsers}, + description: "succeeds resolving user by ID", + resolver: store.UserIDResolver, + value: "507f1f77bcf86cd799439011", + fixtures: []string{fixtureNamespaces, fixtureUsers}, expected: Expected{ user: &models.User{ ID: "507f1f77bcf86cd799439011", + Status: "confirmed", CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), LastLogin: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), EmailMarketing: true, - Confirmed: true, + MaxNamespaces: 0, + Password: models.UserPassword{ + Hash: "fcf730b6d95236ecd3c9fc2d92d7b6b2bb061514961aec041d6c7a7192f592e4", + }, UserData: 
models.UserData{ + Email: "john.doe@test.com", Name: "john doe", Username: "john_doe", - Email: "john.doe@test.com", - }, - MaxNamespaces: 0, - UserPassword: models.UserPassword{ - HashedPassword: "fcf730b6d95236ecd3c9fc2d92d7b6b2bb061514961aec041d6c7a7192f592e4", }, }, err: nil, }, }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - user, err := mongostore.UserGetByUsername(context.TODO(), tc.username) - assert.Equal(t, tc.expected, Expected{user: user, err: err}) - }) - } -} - -func TestUserGetByEmail(t *testing.T) { - type Expected struct { - user *models.User - err error - } - - cases := []struct { - description string - email string - fixtures []string - expected Expected - }{ { - description: "fails when email is not found", - email: "nonexistent", - fixtures: []string{fixtures.FixtureUsers}, + description: "fails when user not found by email", + resolver: store.UserEmailResolver, + value: "nonexistent@test.com", + fixtures: []string{fixtureUsers}, expected: Expected{ user: nil, err: store.ErrNoDocuments, }, }, { - description: "succeeds when email is found", - email: "john.doe@test.com", - fixtures: []string{fixtures.FixtureUsers}, + description: "succeeds resolving user by email", + resolver: store.UserEmailResolver, + value: "jane.smith@test.com", + fixtures: []string{fixtureNamespaces, fixtureUsers}, expected: Expected{ user: &models.User{ - ID: "507f1f77bcf86cd799439011", - CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - LastLogin: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + ID: "608f32a2c7351f001f6475e0", + Status: "confirmed", + CreatedAt: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), + LastLogin: time.Date(2023, 1, 2, 12, 0, 0, 0, time.UTC), 
EmailMarketing: true, - Confirmed: true, - UserData: models.UserData{ - Name: "john doe", - Username: "john_doe", - Email: "john.doe@test.com", + MaxNamespaces: 3, + Password: models.UserPassword{ + Hash: "a0b8c29f4c8d57e542f5e81d35ebe801fd27f569f116fe670e8962d798512a1d", }, - MaxNamespaces: 0, - UserPassword: models.UserPassword{ - HashedPassword: "fcf730b6d95236ecd3c9fc2d92d7b6b2bb061514961aec041d6c7a7192f592e4", + UserData: models.UserData{ + Email: "jane.smith@test.com", + Name: "Jane Smith", + Username: "jane_smith", }, }, err: nil, }, }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - user, err := mongostore.UserGetByEmail(context.TODO(), tc.email) - assert.Equal(t, tc.expected, Expected{user: user, err: err}) - }) - } -} - -func TestUserGetByID(t *testing.T) { - type Expected struct { - user *models.User - ns int - err error - } - - cases := []struct { - description string - id string - ns bool - fixtures []string - expected Expected - }{ { - description: "fails when user is not found", - id: "507f1f77bcf86cd7994390bb", - fixtures: []string{fixtures.FixtureUsers, fixtures.FixtureNamespaces}, + description: "fails when user not found by username", + resolver: store.UserUsernameResolver, + value: "nonexistent_user", + fixtures: []string{fixtureUsers}, expected: Expected{ user: nil, - ns: 0, err: store.ErrNoDocuments, }, }, { - description: "succeeds when user is found with ns equal false", - id: "507f1f77bcf86cd799439011", - ns: false, - fixtures: []string{fixtures.FixtureUsers, fixtures.FixtureNamespaces}, + description: "succeeds resolving user by username", + resolver: store.UserUsernameResolver, + value: "bob_johnson", + fixtures: []string{fixtureNamespaces, 
fixtureUsers}, expected: Expected{ user: &models.User{ - ID: "507f1f77bcf86cd799439011", - CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - LastLogin: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + ID: "709f45b5e812c1002f3a67e7", + Status: "confirmed", + CreatedAt: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), + LastLogin: time.Date(2023, 1, 3, 12, 0, 0, 0, time.UTC), EmailMarketing: true, - Confirmed: true, - UserData: models.UserData{ - Name: "john doe", - Username: "john_doe", - Email: "john.doe@test.com", + MaxNamespaces: 10, + Password: models.UserPassword{ + Hash: "5f3b3956a1a150b73e6b27e674f27d7aeb01ab1a40c179c3e1aa6026a36655a2", }, - MaxNamespaces: 0, - UserPassword: models.UserPassword{ - HashedPassword: "fcf730b6d95236ecd3c9fc2d92d7b6b2bb061514961aec041d6c7a7192f592e4", - }, - }, - ns: 0, - err: nil, - }, - }, - { - description: "succeeds when user is found with ns equal true", - id: "507f1f77bcf86cd799439011", - ns: true, - fixtures: []string{fixtures.FixtureUsers, fixtures.FixtureNamespaces}, - expected: Expected{ - user: &models.User{ - ID: "507f1f77bcf86cd799439011", - CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - LastLogin: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - EmailMarketing: true, - Confirmed: true, UserData: models.UserData{ - Name: "john doe", - Username: "john_doe", - Email: "john.doe@test.com", - }, - MaxNamespaces: 0, - UserPassword: models.UserPassword{ - HashedPassword: "fcf730b6d95236ecd3c9fc2d92d7b6b2bb061514961aec041d6c7a7192f592e4", + Email: "bob.johnson@test.com", + Name: "Bob Johnson", + Username: "bob_johnson", }, }, - ns: 1, err: nil, }, }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + assert.NoError(t, 
srv.Apply(tc.fixtures...)) + t.Cleanup(func() { assert.NoError(t, srv.Reset()) }) - user, ns, err := mongostore.UserGetByID(context.TODO(), tc.id, tc.ns) - assert.Equal(t, tc.expected, Expected{user: user, ns: ns, err: err}) + user, err := s.UserResolve(context.Background(), tc.resolver, tc.value) + assert.Equal(t, tc.expected, Expected{user: user, err: err}) }) } } -func TestUserUpdateData(t *testing.T) { +func TestUserCreate(t *testing.T) { cases := []struct { description string - id string - data models.User + user *models.User fixtures []string expected error }{ { - description: "fails when user is not found", - id: "000000000000000000000000", - data: models.User{ - LastLogin: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + description: "succeeds when data is valid", + user: &models.User{ UserData: models.UserData{ - Name: "edited name", - Username: "edited_name", - Email: "edited@test.com", + Name: "john doe", + Username: "john_doe", + Email: "john.doe@test.com", }, - }, - fixtures: []string{fixtures.FixtureUsers}, - expected: store.ErrNoDocuments, - }, - { - description: "succeeds when user is found", - id: "507f1f77bcf86cd799439011", - fixtures: []string{fixtures.FixtureUsers}, - data: models.User{ - LastLogin: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - UserData: models.UserData{ - Name: "edited name", - Username: "edited_name", - Email: "edited@test.com", + Password: models.UserPassword{ + Hash: "fcf730b6d95236ecd3c9fc2d92d7b6b2bb061514961aec041d6c7a7192f592e4", }, }, + fixtures: []string{}, expected: nil, }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + ctx := context.Background() + + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, 
srv.Reset()) + }) - err := mongostore.UserUpdateData(context.TODO(), tc.id, tc.data) + insertedID, err := s.UserCreate(ctx, tc.user) assert.Equal(t, tc.expected, err) + assert.NotEmpty(t, insertedID) }) } } -func TestUserUpdatePassword(t *testing.T) { +func TestUserConflicts(t *testing.T) { + type Expected struct { + conflicts []string + ok bool + err error + } + cases := []struct { description string - id string - password string + target *models.UserConflicts fixtures []string - expected error + expected Expected }{ { - description: "fails when user id is not valid", - id: "invalid", - password: "other_password", - fixtures: []string{fixtures.FixtureUsers}, - expected: store.ErrInvalidHex, - }, - { - description: "fails when user is not found", - id: "000000000000000000000000", - password: "other_password", - fixtures: []string{fixtures.FixtureUsers}, - expected: store.ErrNoDocuments, + description: "no conflicts when target is empty", + target: &models.UserConflicts{}, + fixtures: []string{fixtureUsers}, + expected: Expected{[]string{}, false, nil}, }, { - description: "succeeds when user is found", - id: "507f1f77bcf86cd799439011", - password: "other_password", - fixtures: []string{fixtures.FixtureUsers}, - expected: nil, + description: "no conflicts with non existing email", + target: &models.UserConflicts{Email: "other@test.com"}, + fixtures: []string{fixtureUsers}, + expected: Expected{[]string{}, false, nil}, }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := mongostore.UserUpdatePassword(context.TODO(), tc.password, tc.id) - assert.Equal(t, tc.expected, err) - }) - } -} - -func TestUserUpdateAccountStatus(t *testing.T) { - cases := []struct { - description 
string - id string - fixtures []string - expected error - }{ { - description: "fails when user id is not valid", - id: "invalid", - fixtures: []string{fixtures.FixtureUsers}, - expected: store.ErrInvalidHex, + description: "no conflicts with non existing username", + target: &models.UserConflicts{Username: "other"}, + fixtures: []string{fixtureUsers}, + expected: Expected{[]string{}, false, nil}, }, { - description: "fails when user is not found", - id: "000000000000000000000000", - fixtures: []string{fixtures.FixtureUsers}, - expected: store.ErrNoDocuments, + description: "no conflicts with non existing username and email", + target: &models.UserConflicts{Email: "other@test.com", Username: "other"}, + fixtures: []string{fixtureUsers}, + expected: Expected{[]string{}, false, nil}, }, { - description: "succeeds when user is found", - id: "80fdcea1d7299c002f3a67e8", - fixtures: []string{fixtures.FixtureUsers}, - expected: nil, + description: "conflict detected with existing email", + target: &models.UserConflicts{Email: "john.doe@test.com"}, + fixtures: []string{fixtureUsers}, + expected: Expected{[]string{"email"}, true, nil}, }, - } - - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := mongostore.UserUpdateAccountStatus(context.TODO(), tc.id) - assert.Equal(t, tc.expected, err) - }) - } -} - -func TestUserUpdateFromAdmin(t *testing.T) { - cases := []struct { - description string - id string - name string - username string - email string - password string - fixtures []string - expected error - }{ { - description: "fails when user is not found", - id: "000000000000000000000000", - name: "other name", - username: "other_name", - email: "other.email@test.com", - password: 
"other_password", - fixtures: []string{fixtures.FixtureUsers}, - expected: store.ErrNoDocuments, + description: "conflict detected with existing username", + target: &models.UserConflicts{Username: "john_doe"}, + fixtures: []string{fixtureUsers}, + expected: Expected{[]string{"username"}, true, nil}, }, { - description: "succeeds when user is found", - id: "507f1f77bcf86cd799439011", - name: "other name", - username: "other_name", - email: "other.email@test.com", - password: "other_password", - fixtures: []string{fixtures.FixtureUsers}, - expected: nil, + description: "conflict detected with existing username and email", + target: &models.UserConflicts{Email: "john.doe@test.com", Username: "john_doe"}, + fixtures: []string{fixtureUsers}, + expected: Expected{[]string{"username", "email"}, true, nil}, }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := mongostore.UserUpdateFromAdmin(context.TODO(), tc.name, tc.username, tc.email, tc.password, tc.id) - assert.Equal(t, tc.expected, err) - }) - } -} - -func TestUserCreateToken(t *testing.T) { - cases := []struct { - description string - token *models.UserTokenRecover - fixtures []string - expected error - }{ - { - description: "succeeds when data is valid", - token: &models.UserTokenRecover{ - Token: "token", - User: "507f1f77bcf86cd799439011", - }, - fixtures: []string{}, - expected: nil, - }, - } - - db := dbtest.DBServer{} - defer db.Stop() + ctx := context.Background() - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") + require.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { require.NoError(t, srv.Reset()) }) - for _, tc := range cases { - 
t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck - - err := mongostore.UserCreateToken(context.TODO(), tc.token) - assert.Equal(t, tc.expected, err) + conflicts, ok, err := s.UserConflicts(ctx, tc.target) + require.Equal(t, tc.expected, Expected{conflicts, ok, err}) }) } } -func TestUserTokenGet(t *testing.T) { +func TestUserUpdate(t *testing.T) { type Expected struct { - token *models.UserTokenRecover - err error + user *models.User + err error } cases := []struct { description string - id string + user *models.User fixtures []string expected Expected }{ { description: "fails when user is not found", - id: "000000000000000000000000", - fixtures: []string{fixtures.FixtureUsers, fixtures.FixtureRecoveryTokens}, + user: &models.User{ + ID: "000000000000000000000000", + }, + fixtures: []string{fixtureUsers}, expected: Expected{ - token: nil, - err: store.ErrNoDocuments, + user: nil, + err: store.ErrNoDocuments, }, }, { - description: "succeeds when user is found", - id: "507f1f77bcf86cd799439011", - fixtures: []string{fixtures.FixtureUsers, fixtures.FixtureRecoveryTokens}, + description: "succeeds when updating string values", + user: &models.User{ + ID: "507f1f77bcf86cd799439011", + LastLogin: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UserData: models.UserData{ + Name: "New Value", + Email: "new.value@test.com", + Username: "john_doe", + }, + Status: models.UserStatusNotConfirmed, + Password: models.UserPassword{Hash: "fcf730b6d95236ecd3c9fc2d92d7b6b2bb061514961aec041d6c7a7192f592e4"}, + }, + fixtures: []string{fixtureUsers}, expected: Expected{ - token: &models.UserTokenRecover{ - CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - Token: "token", - User: "507f1f77bcf86cd799439011", + user: &models.User{ + LastLogin: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), + UserData: models.UserData{ + Name: "New Value", + Email: "new.value@test.com", + Username: 
"john_doe", + }, + Status: models.UserStatusNotConfirmed, + Password: models.UserPassword{Hash: "fcf730b6d95236ecd3c9fc2d92d7b6b2bb061514961aec041d6c7a7192f592e4"}, + }, + err: nil, + }, + }, + { + description: "succeeds when updating time values", + user: &models.User{ + ID: "507f1f77bcf86cd799439011", + LastLogin: time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC), + UserData: models.UserData{ + Name: "john doe", + Email: "john.doe@test.com", + Username: "john_doe", + }, + Status: models.UserStatusConfirmed, + Password: models.UserPassword{Hash: "fcf730b6d95236ecd3c9fc2d92d7b6b2bb061514961aec041d6c7a7192f592e4"}, + }, + fixtures: []string{fixtureUsers}, + expected: Expected{ + user: &models.User{ + LastLogin: time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC), + UserData: models.UserData{ + Name: "john doe", + Email: "john.doe@test.com", + Username: "john_doe", + }, + Status: models.UserStatusConfirmed, + Password: models.UserPassword{Hash: "fcf730b6d95236ecd3c9fc2d92d7b6b2bb061514961aec041d6c7a7192f592e4"}, }, err: nil, }, }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + ctx := context.Background() - token, err := mongostore.UserGetToken(context.TODO(), tc.id) - assert.Equal(t, tc.expected, Expected{token: token, err: err}) - }) - } -} + require.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { require.NoError(t, srv.Reset()) }) -func TestUserDeleteTokens(t *testing.T) { - cases := []struct { - description string - id string - fixtures []string - expected error - }{ - { - description: "succeeds when user is found", - id: "507f1f77bcf86cd799439011", - fixtures: []string{fixtures.FixtureUsers, fixtures.FixtureRecoveryTokens}, - expected: nil, - }, - } + if err := s.UserUpdate(ctx, 
tc.user); err != nil { + require.Equal(t, tc.expected.err, err) - db := dbtest.DBServer{} - defer db.Stop() + return + } - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") + id, err := primitive.ObjectIDFromHex(tc.user.ID) + require.NoError(t, err) - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + user := new(models.User) + require.NoError(t, db.Collection("users").FindOne(ctx, bson.M{"_id": id}).Decode(user)) - err := mongostore.UserDeleteTokens(context.TODO(), tc.id) - assert.Equal(t, tc.expected, err) + require.Equal(t, tc.expected.user.LastLogin, user.LastLogin) + require.Equal(t, tc.expected.user.Name, user.Name) + require.Equal(t, tc.expected.user.Email, user.Email) + require.Equal(t, tc.expected.user.Status, user.Status) + require.Equal(t, tc.expected.user.Username, user.Username) + require.Equal(t, tc.expected.user.Password.Hash, user.Password.Hash) }) } } @@ -758,146 +560,39 @@ func TestUserDeleteTokens(t *testing.T) { func TestUserDelete(t *testing.T) { cases := []struct { description string - id string + user *models.User fixtures []string expected error }{ { description: "fails when user is not found", - id: "000000000000000000000000", - fixtures: []string{fixtures.FixtureUsers}, - expected: store.ErrNoDocuments, + user: &models.User{ + ID: "000000000000000000000000", + }, + fixtures: []string{fixtureUsers}, + expected: store.ErrNoDocuments, }, { description: "succeeds when user is found", - id: "507f1f77bcf86cd799439011", - fixtures: []string{fixtures.FixtureUsers}, - expected: nil, + user: &models.User{ + ID: "507f1f77bcf86cd799439011", + }, + fixtures: []string{fixtureUsers}, + expected: nil, }, } - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - fixtures.Init(db.Host, "test") - for _, tc := 
range cases { t.Run(tc.description, func(t *testing.T) { - assert.NoError(t, fixtures.Apply(tc.fixtures...)) - defer fixtures.Teardown() // nolint: errcheck + ctx := context.Background() - err := mongostore.UserDelete(context.TODO(), tc.id) + assert.NoError(t, srv.Apply(tc.fixtures...)) + t.Cleanup(func() { + assert.NoError(t, srv.Reset()) + }) + + err := s.UserDelete(ctx, tc.user) assert.Equal(t, tc.expected, err) }) } } - -func TestUserDetachInfo(t *testing.T) { - db := dbtest.DBServer{} - defer db.Stop() - - mongostore := NewStore(db.Client().Database("test"), cache.NewNullCache()) - - user := models.User{ - ID: "60af83d418d2dc3007cd445c", - UserData: models.UserData{ - Name: "name", - Username: "username", - Email: "user@email.com", - }, - UserPassword: models.UserPassword{ - HashedPassword: "fcf730b6d95236ecd3c9fc2d92d7b6b2bb061514961aec041d6c7a7192f592e4", - }, - } - - objID, err := primitive.ObjectIDFromHex(user.ID) - - assert.NoError(t, err) - - _, _ = db.Client().Database("test").Collection("users").InsertOne(context.TODO(), bson.M{ - "_id": objID, - "name": user.Name, - "username": user.Username, - "password": user.HashedPassword, - "email": user.Email, - }) - - namespacesOwner := []*models.Namespace{ - { - Owner: user.ID, - Name: "ns2", - Members: []models.Member{ - { - ID: user.ID, - Role: guard.RoleOwner, - }, - }, - }, - { - Owner: user.ID, - Name: "ns4", - Members: []models.Member{ - { - ID: user.ID, - Role: guard.RoleOwner, - }, - }, - }, - } - - namespacesMember := []*models.Namespace{ - { - Owner: "id2", - Name: "ns1", - Members: []models.Member{ - { - ID: user.ID, - Role: guard.RoleObserver, - }, - }, - }, - { - Owner: "id2", - Name: "ns3", - Members: []models.Member{ - { - ID: user.ID, - Role: guard.RoleObserver, - }, - }, - }, - { - Owner: "id2", - Name: "ns5", - Members: []models.Member{ - { - ID: user.ID, - Role: guard.RoleObserver, - }, - }, - }, - } - - for _, n := range namespacesOwner { - inserted, err := 
db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), n) - t.Log(inserted.InsertedID) - assert.NoError(t, err) - } - - for _, n := range namespacesMember { - inserted, err := db.Client().Database("test").Collection("namespaces").InsertOne(context.TODO(), n) - t.Log(inserted.InsertedID) - assert.NoError(t, err) - } - - u, err := mongostore.UserGetByUsername(context.TODO(), "username") - assert.NoError(t, err) - assert.Equal(t, user.Username, u.Username) - - namespacesMap, err := mongostore.UserDetachInfo(context.TODO(), user.ID) - - assert.NoError(t, err) - assert.Equal(t, namespacesMap["owner"], namespacesOwner) - assert.Equal(t, namespacesMap["member"], namespacesMember) -} diff --git a/api/store/mongo/utils.go b/api/store/mongo/utils.go index d5b61540462..de4df65a884 100644 --- a/api/store/mongo/utils.go +++ b/api/store/mongo/utils.go @@ -3,6 +3,7 @@ package mongo import ( "context" "io" + "slices" "github.com/shellhub-io/shellhub/api/store" "github.com/shellhub-io/shellhub/pkg/errors" @@ -11,28 +12,40 @@ import ( "go.mongodb.org/mongo-driver/mongo" ) -// AggregateCount takes a pipeline and count the results. 
-func AggregateCount(ctx context.Context, coll *mongo.Collection, pipeline []bson.M) (int, error) { - resp := struct { - Count int `bson:"count"` - }{} +func CountAllMatchingDocuments(ctx context.Context, collection *mongo.Collection, basePipeline []bson.M) (int, error) { + excludeStages := []string{"$skip", "$limit", "$sort"} + countPipeline := make([]bson.M, 0) - cursor, err := coll.Aggregate(ctx, pipeline) + for _, stage := range basePipeline { + filtered := make(bson.M) + for key, value := range stage { + if !slices.Contains(excludeStages, key) { + filtered[key] = value + } + } + + if len(filtered) > 0 { + countPipeline = append(countPipeline, filtered) + } + } + + countPipeline = append(countPipeline, bson.M{"$count": "count"}) + cursor, err := collection.Aggregate(ctx, countPipeline) if err != nil { return 0, err } - defer cursor.Close(ctx) if !cursor.Next(ctx) { return 0, nil } - if err = cursor.Decode(&resp); err != nil { + result := make(map[string]any) + if err = cursor.Decode(&result); err != nil { return 0, err } - return resp.Count, nil + return int(result["count"].(int32)), nil } // ErrLayer is an error level. Each error defined at this level, is container to it. @@ -58,17 +71,3 @@ func FromMongoError(err error) error { return errors.Wrap(ErrMongo, err) } } - -// removeDuplicate removes duplicate elements from a slice while maintaining the original order. 
-func removeDuplicate[T comparable](slice []T) []T { - allKeys := make(map[T]bool) - list := []T{} - for _, item := range slice { - if _, value := allKeys[item]; !value { - allKeys[item] = true - list = append(list, item) - } - } - - return list -} diff --git a/api/store/namespace.go b/api/store/namespace.go index 93139d44b18..b1efbdeac7c 100644 --- a/api/store/namespace.go +++ b/api/store/namespace.go @@ -3,22 +3,50 @@ package store import ( "context" - "github.com/shellhub-io/shellhub/pkg/api/paginator" "github.com/shellhub-io/shellhub/pkg/models" ) +type NamespaceResolver uint + +const ( + NamespaceTenantIDResolver NamespaceResolver = iota + 1 + NamespaceNameResolver +) + type NamespaceStore interface { - NamespaceList(ctx context.Context, pagination paginator.Query, filters []models.Filter, export bool) ([]models.Namespace, int, error) - NamespaceGet(ctx context.Context, tenantID string) (*models.Namespace, error) - NamespaceGetByName(ctx context.Context, name string) (*models.Namespace, error) - NamespaceCreate(ctx context.Context, namespace *models.Namespace) (*models.Namespace, error) - NamespaceRename(ctx context.Context, tenantID string, name string) (*models.Namespace, error) - NamespaceUpdate(ctx context.Context, tenantID string, namespace *models.Namespace) error - NamespaceDelete(ctx context.Context, tenantID string) error - NamespaceAddMember(ctx context.Context, tenantID string, memberID string, memberRole string) (*models.Namespace, error) - NamespaceRemoveMember(ctx context.Context, tenantID string, memberID string) (*models.Namespace, error) - NamespaceEditMember(ctx context.Context, tenantID string, memberID string, memberNewRole string) error - NamespaceGetFirst(ctx context.Context, id string) (*models.Namespace, error) - NamespaceSetSessionRecord(ctx context.Context, sessionRecord bool, tenantID string) error - NamespaceGetSessionRecord(ctx context.Context, tenantID string) (bool, error) + // NamespaceList retrieves a list of namespaces based 
on the provided filters and pagination settings. + // If the user ID is available in the context, it will only match namespaces that the user is a member + // of and does not have a pending membership status. A list of options can be passed to inject + // additional data into each namespace in the list. + // + // It returns the list of namespaces, the total count of matching documents (ignoring pagination), and + // an error if any. + NamespaceList(ctx context.Context, opts ...QueryOption) ([]models.Namespace, int, error) + + // NamespaceResolve fetches a namespace using a specific resolver. + // + // It returns the resolved namespace if found and an error, if any. + NamespaceResolve(ctx context.Context, resolver NamespaceResolver, value string) (*models.Namespace, error) + + // NamespaceGetPreferred retrieves the user's preferred namespace. If the user has no preferred namespace it returns + // the first namespace where the user is a member (typically the first one the user was added to). A list of options + // can be passed via `opts` to inject additional data into the namespace. + // + // It returns the namespace or an error if any. + NamespaceGetPreferred(ctx context.Context, userID string) (*models.Namespace, error) + + NamespaceCreate(ctx context.Context, namespace *models.Namespace) (string, error) + + NamespaceConflicts(ctx context.Context, target *models.NamespaceConflicts) (conflicts []string, has bool, err error) + + // NamespaceUpdate updates a namespace. It returns an error, if any, or store.ErrNoDocuments if the + // namespace does not exist. + NamespaceUpdate(ctx context.Context, namespace *models.Namespace) error + + // NamespaceIncrementDeviceCount atomically increments or decrements the device count for a specific status within a namespace. + // Returns [ErrNoDocuments] if the namespace is not found. 
+ NamespaceIncrementDeviceCount(ctx context.Context, tenantID string, status models.DeviceStatus, count int64) error + + NamespaceDelete(ctx context.Context, namespace *models.Namespace) error + NamespaceDeleteMany(ctx context.Context, tenantIDs []string) (int64, error) } diff --git a/api/store/publickey.go b/api/store/publickey.go index e36e852ba38..4e50b3d7f26 100644 --- a/api/store/publickey.go +++ b/api/store/publickey.go @@ -3,14 +3,24 @@ package store import ( "context" - "github.com/shellhub-io/shellhub/pkg/api/paginator" "github.com/shellhub-io/shellhub/pkg/models" ) +type PublicKeyResolver int + +const ( + PublicKeyFingerprintResolver PublicKeyResolver = iota +) + type PublicKeyStore interface { - PublicKeyList(ctx context.Context, pagination paginator.Query) ([]models.PublicKey, int, error) - PublicKeyGet(ctx context.Context, fingerprint string, tenantID string) (*models.PublicKey, error) - PublicKeyCreate(ctx context.Context, key *models.PublicKey) error - PublicKeyUpdate(ctx context.Context, fingerprint string, tenantID string, key *models.PublicKeyUpdate) (*models.PublicKey, error) - PublicKeyDelete(ctx context.Context, fingerprint string, tenantID string) error + // PublicKeyList retrieves a list of public keys with optional filtering and pagination. + PublicKeyList(ctx context.Context, opts ...QueryOption) ([]models.PublicKey, int, error) + // PublicKeyResolve retrieves a public key by the given resolver type and value. + PublicKeyResolve(ctx context.Context, resolver PublicKeyResolver, value string, opts ...QueryOption) (*models.PublicKey, error) + // PublicKeyCreate creates a new public key and returns its fingerprint. + PublicKeyCreate(ctx context.Context, key *models.PublicKey) (string, error) + // PublicKeyUpdate updates an existing public key. + PublicKeyUpdate(ctx context.Context, publicKey *models.PublicKey) error + // PublicKeyDelete removes a public key. 
+ PublicKeyDelete(ctx context.Context, publicKey *models.PublicKey) error } diff --git a/api/store/publickey_tags.go b/api/store/publickey_tags.go deleted file mode 100644 index 9c2abc6bdee..00000000000 --- a/api/store/publickey_tags.go +++ /dev/null @@ -1,35 +0,0 @@ -package store - -import "context" - -type PublicKeyTagsStore interface { - // PublicKeyPushTag adds a new tag to the list of tags for a device with the specified UID. - // Returns an error if any issues occur during the tag addition or ErrNoDocuments when matching documents are found. - // - // The tag need to exist on a device. If it is not true, the action will fail. - PublicKeyPushTag(ctx context.Context, tenant, fingerprint, tag string) error - - // PublicKeyPullTag removes a tag from the list of tags for a device with the specified UID. - // Returns an error if any issues occur during the tag removal or ErrNoDocuments when matching documents are found. - // - // To remove a tag, that tag needs to exist on a device. If it is not, the action will fail. - PublicKeyPullTag(ctx context.Context, tenant, fingerprint, tag string) error - - // PublicKeySetTags sets the tags for a public key with the specified fingerprint and tenant. - // It returns the number of matching documents, the number of modified documents, and any encountered errors. - // - // All tags need to exist on a device. If it is not true, the update action will fail. - PublicKeySetTags(ctx context.Context, tenant, fingerprint string, tags []string) (matchedCount int64, updatedCount int64, err error) - - // PublicKeyBulkRenameTag replaces all occurrences of the old tag with the new tag for all public keys to the specified tenant. - // Returns the number of documents updated and an error if any issues occur during the tag renaming. 
- PublicKeyBulkRenameTag(ctx context.Context, tenant, currentTag, newTag string) (updatedCount int64, err error) - - // PublicKeyBulkDeleteTag removes a tag from all public keys belonging to the specified tenant. - // Returns the number of documents updated and an error if any issues occur during the tag deletion. - PublicKeyBulkDeleteTag(ctx context.Context, tenant, tag string) (updatedCount int64, err error) - - // PublicKeyGetTags retrieves all tags associated with the tenant. - // Returns the tags, the number of tags, and an error if any issues occur. - PublicKeyGetTags(ctx context.Context, tenant string) (tag []string, size int, err error) -} diff --git a/api/store/query-options.go b/api/store/query-options.go new file mode 100644 index 00000000000..d944e7d624f --- /dev/null +++ b/api/store/query-options.go @@ -0,0 +1,31 @@ +package store + +import ( + "context" + + "github.com/shellhub-io/shellhub/pkg/api/query" + "github.com/shellhub-io/shellhub/pkg/models" +) + +type NamespaceQueryOption func(ctx context.Context, ns *models.Namespace) error + +type QueryOption func(ctx context.Context) error + +type QueryOptions interface { + // InNamespace matches a document that belongs to the provided namespace + InNamespace(tenantID string) QueryOption + + // WithDeviceStatus matches a device with the provided status + WithDeviceStatus(models.DeviceStatus) QueryOption + + // Match applies the provided query filters to match records + Match(fs *query.Filters) QueryOption + + // Paginate applies pagination to limit the number of records returned. + // If paginator is nil, no pagination is applied. + Paginate(paginator *query.Paginator) QueryOption + + // Sort applies sorting criteria to order the returned records. + // If sorter is nil, no specific sorting is applied. 
+ Sort(sorter *query.Sorter) QueryOption +} diff --git a/api/store/session.go b/api/store/session.go index 2c09b11a4b5..062e8bde9b3 100644 --- a/api/store/session.go +++ b/api/store/session.go @@ -2,23 +2,45 @@ package store import ( "context" - "time" - "github.com/shellhub-io/shellhub/pkg/api/paginator" "github.com/shellhub-io/shellhub/pkg/models" ) +type SessionResolver uint + +const ( + SessionUIDResolver SessionResolver = iota + 1 +) + type SessionStore interface { - SessionList(ctx context.Context, pagination paginator.Query) ([]models.Session, int, error) - SessionGet(ctx context.Context, uid models.UID) (*models.Session, error) - SessionCreate(ctx context.Context, session models.Session) (*models.Session, error) - SessionSetAuthenticated(ctx context.Context, uid models.UID, authenticated bool) error - SessionSetLastSeen(ctx context.Context, uid models.UID) error - SessionDeleteActives(ctx context.Context, uid models.UID) error - SessionCreateRecordFrame(ctx context.Context, uid models.UID, recordSession *models.RecordedSession) error + // SessionList retrieves a list of sessions based on the provided filters and pagination settings. + // It returns the list of sessions, the total count of matching documents, and an error if any. + SessionList(ctx context.Context, opts ...QueryOption) ([]models.Session, int, error) + // SessionResolve fetches a session using a specific resolver. + // It returns the resolved session if found and an error, if any. + SessionResolve(ctx context.Context, resolver SessionResolver, value string, opts ...QueryOption) (*models.Session, error) + // SessionCreate creates a new session. It returns the inserted UID and an error if any. + SessionCreate(ctx context.Context, session models.Session) (string, error) + // SessionUpdate updates a session. It returns an error if any. + SessionUpdate(ctx context.Context, session *models.Session) error + + // ActiveSessionCreate creates an active session entry. It returns an error if any. 
+ ActiveSessionCreate(ctx context.Context, session *models.Session) error + // ActiveSessionResolve fetches an active session using a specific resolver. It returns the active session if found and an error, if any. + ActiveSessionResolve(ctx context.Context, resolver SessionResolver, value string) (*models.ActiveSession, error) + // ActiveSessionUpdate updates an active session. It returns an error if any. + ActiveSessionUpdate(ctx context.Context, activeSession *models.ActiveSession) error + + // ActiveSessionDelete removes active session entries. It returns an error if any. + ActiveSessionDelete(ctx context.Context, uid models.UID) error + + // SessionEventsCreate creates a session event. It returns an error if any. + SessionEventsCreate(ctx context.Context, event *models.SessionEvent) error + // SessionEventsList retrieves session events based on filters. It returns the list of events, total count, and an error if any. + SessionEventsList(ctx context.Context, uid models.UID, seat int, event models.SessionEventType, opts ...QueryOption) ([]models.SessionEvent, int, error) + // SessionEventsDelete removes session events based on filters. It returns an error if any. + SessionEventsDelete(ctx context.Context, uid models.UID, seat int, event models.SessionEventType) error + + // SessionUpdateDeviceUID updates device UID references across sessions. It returns an error if any. 
SessionUpdateDeviceUID(ctx context.Context, oldUID models.UID, newUID models.UID) error - SessionGetRecordFrame(ctx context.Context, uid models.UID) ([]models.RecordedSession, int, error) - SessionDeleteRecordFrame(ctx context.Context, uid models.UID) error - SessionDeleteRecordFrameByDate(ctx context.Context, lte time.Time) (deletedCount int64, updatedCount int64, err error) - SessionSetRecorded(ctx context.Context, uid models.UID, recorded bool) error } diff --git a/api/store/stats.go b/api/store/stats.go index 5e0c5b24fbd..77aa67747f4 100644 --- a/api/store/stats.go +++ b/api/store/stats.go @@ -7,5 +7,7 @@ import ( ) type StatsStore interface { - GetStats(ctx context.Context) (*models.Stats, error) + // GetStats retrieves device and session statistics. If tenantID is provided, + // statistics are filtered to that tenant. If empty, returns global statistics. + GetStats(ctx context.Context, tenantID string) (*models.Stats, error) } diff --git a/api/store/store.go b/api/store/store.go index 04ee3587e7c..f05b8a75a0e 100644 --- a/api/store/store.go +++ b/api/store/store.go @@ -2,19 +2,21 @@ package store //go:generate mockery --name Store --filename store.go type Store interface { - AnnouncementsStore TagsStore DeviceStore - DeviceTagsStore SessionStore UserStore - FirewallStore - FirewallTagsStore + UserInvitationsStore NamespaceStore + MemberStore + MembershipInvitationsStore PublicKeyStore - PublicKeyTagsStore PrivateKeyStore - LicenseStore StatsStore - MFAStore + APIKeyStore + TransactionStore + SystemStore + TunnelStore + + Options() QueryOptions } diff --git a/api/store/system.go b/api/store/system.go new file mode 100644 index 00000000000..ee7b19b6eeb --- /dev/null +++ b/api/store/system.go @@ -0,0 +1,12 @@ +package store + +import ( + "context" + + "github.com/shellhub-io/shellhub/pkg/models" +) + +type SystemStore interface { + SystemGet(ctx context.Context) (*models.System, error) + SystemSet(ctx context.Context, system *models.System) error +} diff --git 
a/api/store/tags.go b/api/store/tags.go index 4894f572f3b..e28e45f6d8a 100644 --- a/api/store/tags.go +++ b/api/store/tags.go @@ -1,21 +1,75 @@ package store -import "context" +import ( + "context" + + "github.com/shellhub-io/shellhub/pkg/models" +) + +type TagResolver uint + +const ( + TagIDResolver TagResolver = iota + 1 + TagNameResolver +) + +type TagTarget int + +const ( + TagTargetDevice TagTarget = iota + 1 + TagTargetPublicKey + TagTargetFirewallRule +) + +func TagTargets() []TagTarget { + return []TagTarget{TagTargetDevice, TagTargetPublicKey, TagTargetFirewallRule} +} type TagsStore interface { - // TagsGet retrieves all tags associated with the specified tenant. It functions by invoking "[document]GetTags" - // for each document that implements tags. - // Returns the tags, the count of unique tags, and an error if any issues arise. - // It also filters the returned tags, removing any duplicates. - TagsGet(ctx context.Context, tenant string) (tags []string, n int, err error) - - // TagsRename replaces all occurrences of the old tag with the new tag for all documents associated with the specified tenant. - // It operates by invoking "[document]BulkRenameTag" for each document that implements tags. - // Returns the count of documents updated and an error if any issues arise during the tag renaming. - TagsRename(ctx context.Context, tenant string, oldTag string, newTag string) (updatedCount int64, err error) - - // TagsDelete removes a tag from all documents associated with the specified tenant. It operates by - // invoking "[document]BulkDeleteTag" for each document that implements tags. - // Returns the count of documents updated and an error if any issues arise during the tag deletion. - TagsDelete(ctx context.Context, tenant string, tag string) (updatedCount int64, err error) + // TagCreate creates new tag. + // + // It returns the inserted ID or an error if any. 
+ TagCreate(ctx context.Context, tag *models.Tag) (insertedID string, err error) + + // TagConflicts checks for uniqueness violations of tag attributes within a namespace. + // Only non-zero values in the target are checked for conflicts. + // + // Example: + // conflicts, _, _ := store.TagConflicts(context.Background(), "tenant123", &models.TagConflicts{Name: "development"}) + // println(conflicts) // => []string{"name"} + // + // It returns an array of conflicting attribute fields and an error, if any. + TagConflicts(ctx context.Context, tenantID string, target *models.TagConflicts) (conflicts []string, has bool, err error) + + // TagList retrieves a list of tags based on the provided options. + // + // It returns the list of tags, the total count of matching documents (ignoring pagination), and an error if any. + TagList(ctx context.Context, opts ...QueryOption) (tags []models.Tag, totalCount int, err error) + + // TagResolve fetches a tag using a specific resolver. + // + // It returns the resolved tag if found and an error, if any. + TagResolve(ctx context.Context, resolver TagResolver, value string, opts ...QueryOption) (tag *models.Tag, err error) + + // TagUpdate updates a tag. + // + // It returns an error, if any, or store.ErrNoDocuments if the tag does not exist. + TagUpdate(ctx context.Context, tag *models.Tag) error + + // TagPushToTarget pushes an existing tag to the provided target. + // + // Returns an error if any issues occur during the tag addition or ErrNoDocuments when no matching documents are found. + TagPushToTarget(ctx context.Context, id string, target TagTarget, targetID string) (err error) + + // TagPullFromTarget removes a tag from tagged documents in a namespace. If targetsID is empty it removes the tag from + // all documents of the selected target type. If targetsID contains specific target IDs it only removes the tag from those + // documents.
+ // + // Returns ErrNoDocuments if no matching documents found or other errors from the operation. + TagPullFromTarget(ctx context.Context, id string, target TagTarget, targetIDs ...string) (err error) + + // TagDelete deletes a tag. + // + // It returns an error, if any, or store.ErrNoDocuments if the tag does not exist. + TagDelete(ctx context.Context, tag *models.Tag) error +} diff --git a/api/store/transaction.go b/api/store/transaction.go new file mode 100644 index 00000000000..560189a1e48 --- /dev/null +++ b/api/store/transaction.go @@ -0,0 +1,20 @@ +package store + +import ( + "context" + "errors" +) + +var ErrStartTransactionFailed = errors.New("start transaction failed") + +// TransactionCb defines the function signature expected for transaction operations. +// It typically encompasses a series of store method calls that must be executed within a transaction. +type TransactionCb func(ctx context.Context) error + +type TransactionStore interface { + // WithTransaction executes a callback cb within a transaction, ensuring that a series of store + // operations are executed as a single unit, committing the changes when the callback returns nil. + // If any operation fails, the transaction is aborted, rolling back all operations and returning the + // error from the callback. It returns ErrStartTransactionFailed if the transaction cannot start. + WithTransaction(ctx context.Context, cb TransactionCb) error +} diff --git a/api/store/tunnel.go b/api/store/tunnel.go new file mode 100644 index 00000000000..5941e03641f --- /dev/null +++ b/api/store/tunnel.go @@ -0,0 +1,8 @@ +package store + +import "context" + +type TunnelStore interface { + // TunnelUpdateDeviceUID changes all tunnels from oldUID to newUID within the specified tenantID.
+ TunnelUpdateDeviceUID(ctx context.Context, tenantID, oldUID, newUID string) error +} diff --git a/api/store/user-invitations.go b/api/store/user-invitations.go new file mode 100644 index 00000000000..2779d42621e --- /dev/null +++ b/api/store/user-invitations.go @@ -0,0 +1,11 @@ +package store + +import ( + "context" +) + +type UserInvitationsStore interface { + // UserInvitationsUpsert creates a new user invitation or updates an existing one with the provided email. + // It returns the upserted ID or an error, if any. + UserInvitationsUpsert(ctx context.Context, email string) (upsertedID string, err error) +} diff --git a/api/store/user.go b/api/store/user.go index b332ff0b404..31abec20cc0 100644 --- a/api/store/user.go +++ b/api/store/user.go @@ -3,23 +3,43 @@ package store import ( "context" - "github.com/shellhub-io/shellhub/pkg/api/paginator" "github.com/shellhub-io/shellhub/pkg/models" ) +type UserResolver uint + +const ( + UserIDResolver UserResolver = iota + 1 + UserEmailResolver + UserUsernameResolver +) + type UserStore interface { - UserList(ctx context.Context, pagination paginator.Query, filters []models.Filter) ([]models.User, int, error) - UserCreate(ctx context.Context, user *models.User) error - UserGetByUsername(ctx context.Context, username string) (*models.User, error) - UserGetByEmail(ctx context.Context, email string) (*models.User, error) - UserGetByID(ctx context.Context, id string, ns bool) (*models.User, int, error) - UserUpdateData(ctx context.Context, id string, user models.User) error - UserUpdatePassword(ctx context.Context, newPassword string, id string) error - UserUpdateFromAdmin(ctx context.Context, name string, username string, email string, password string, id string) error - UserCreateToken(ctx context.Context, token *models.UserTokenRecover) error - UserGetToken(ctx context.Context, id string) (*models.UserTokenRecover, error) - UserDeleteTokens(ctx context.Context, id string) error - UserUpdateAccountStatus(ctx 
context.Context, id string) error - UserDetachInfo(ctx context.Context, id string) (map[string][]*models.Namespace, error) - UserDelete(ctx context.Context, id string) error + UserList(ctx context.Context, opts ...QueryOption) ([]models.User, int, error) + + // UserCreate creates a new user with the provided data. `user.CreatedAt` is set to now before save. + // It returns the inserted ID or an error, if any. + UserCreate(ctx context.Context, user *models.User) (insertedID string, err error) + + // UserResolve fetches a user using a specific resolver. + // + // It returns the resolved user if found and an error, if any. + UserResolve(ctx context.Context, resolver UserResolver, value string, opts ...QueryOption) (*models.User, error) + + // UserConflicts reports whether the target contains conflicting attributes with the database. Pass zero values for + // attributes you do not wish to match on. For example, the following call checks for conflicts based on email only: + // + // ctx := context.Background() + // conflicts, has, err := store.UserConflicts(ctx, &models.UserConflicts{Email: "john.doe@test.com", Username: ""}) + // + // It returns an array of conflicting attribute fields and an error, if any. + UserConflicts(ctx context.Context, target *models.UserConflicts) (conflicts []string, has bool, err error) + + UserUpdate(ctx context.Context, user *models.User) error + + // UserGetInfo retrieves the user's information, like the owned and associated namespaces. + // It returns an error if the user is not part of any namespace.
+ UserGetInfo(ctx context.Context, id string) (userInfo *models.UserInfo, err error) + + UserDelete(ctx context.Context, user *models.User) error } diff --git a/api/templates/kickstart.sh b/api/templates/kickstart.sh deleted file mode 100644 index aabe0ca912e..00000000000 --- a/api/templates/kickstart.sh +++ /dev/null @@ -1,64 +0,0 @@ -#!/bin/sh - -# This file is a template which gets run through the "/kickstart.sh" route to -# generate the final script file. PLEASE NEVER RUN THIS FILE DIRECTLY, instead -# run me with: -# -# curl -sSf "http:///kickstart.sh?tenant_id=" | sh -# -# Where: -# is the ShellHub server address -# is your tenant ID -# -# List of URL parameters of /kickstart.sh URL: -# -# keepalive_interval = Specifies in seconds the keep alive message interval -# preferred_hostname = The preferred hostname to use rather than generated -# value from ethernet MAC address - -type docker > /dev/null 2>&1 || { echo "Docker is not instaled"; exit 1; } - -if ! docker info > /dev/null 2>&1; then - cat < " && exit 1 - -NAMESPACE=$1 -OWNER=$2 - -docker-compose exec cli ./cli add-namespace "$NAMESPACE" "$OWNER" "$TENANT_ID" diff --git a/bin/add-user b/bin/add-user deleted file mode 100755 index 750f7c796f4..00000000000 --- a/bin/add-user +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/sh - -[ $# -ne 3 ] && echo "Usage: $0 " && exit 1 - -USERNAME=$1 -PASSWORD=$2 -EMAIL=$3 - -docker-compose exec cli ./cli add-user $USERNAME $PASSWORD $EMAIL diff --git a/bin/add-user-namespace b/bin/add-user-namespace deleted file mode 100755 index 9b43790c26a..00000000000 --- a/bin/add-user-namespace +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/sh - -[ $# -ne 3 ] && echo "Usage: $0 " && exit 1 - -USERNAME=$1 -NAMESPACE=$2 -ROLE=$3 - -docker-compose exec cli ./cli add-user-namespace $USERNAME $NAMESPACE $ROLE diff --git a/bin/cli b/bin/cli index 227d51f25ae..dfe7f060b7a 100755 --- a/bin/cli +++ b/bin/cli @@ -1,8 +1,12 @@ #!/bin/sh -# -# CLI script is a inferface to executes commands on the CLI service. 
-# The CLI service contains a set of commands to manage users, namesapces and members. -shift $@ # remove the first argument; script name. +# CLI script is an interface to execute commands on the CLI service. +# The CLI service contains a set of commands to manage users, namespaces and members. -docker-compose exec cli ./cli $@ +. "$(dirname "$0")/utils" + +cd $(dirname $(readlink_f $0))/../ + +exit_if_not_running + +exec docker compose exec cli ./cli $@ diff --git a/bin/del-namespace b/bin/del-namespace deleted file mode 100755 index 6dc14430542..00000000000 --- a/bin/del-namespace +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/sh - -[ -z $1 ] && echo "Usage: $0 " && exit 1 - -NAMESPACE=$1 - -docker-compose exec cli ./cli del-namespace $NAMESPACE diff --git a/bin/del-user b/bin/del-user deleted file mode 100755 index 41fa7399697..00000000000 --- a/bin/del-user +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/sh - -[ -z $1 ] && echo "Usage: $0 " && exit 1 - -USERNAME=$1 - -docker-compose exec cli ./cli del-user $USERNAME diff --git a/bin/del-user-namespace b/bin/del-user-namespace deleted file mode 100755 index cea542d665b..00000000000 --- a/bin/del-user-namespace +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh - -[ $# -ne 2 ] && echo "Usage: $0 " && exit 1 - -USERNAME=$1 -NAMESPACE=$2 - -docker-compose exec cli ./cli del-user-namespace $USERNAME $NAMESPACE diff --git a/bin/docker-compose b/bin/docker-compose index 46565c25e4b..dd40119bf37 100755 --- a/bin/docker-compose +++ b/bin/docker-compose @@ -1,69 +1,9 @@ #!/bin/sh -readlink_f() { - (if uname | grep -q 'Darwin'; then - # Use greadlink if available, otherwise it behaves like "readlink -f" option - if command -v greadlink >/dev/null 2>&1; then - greadlink -f "$1" - else - target_file="$1" - cd "$(dirname "$target_file")" - target_file=$(basename "$target_file") - - while [ -L "$target_file" ] - do - target_file=$(readlink "$target_file") - cd "$(dirname "$target_file")" - target_file=$(basename "$target_file") - done - - echo "$(pwd 
-P)/$target_file" - fi - else - readlink -f "$1" - fi) -} - -# The following code checks if the docker compose plugin or standalone binary -# supports specification V2 by looking at the version number. If it does, -# it sets the COMPOSE_COMMAND variable to the corresponding command. -if docker compose version --short 2> /dev/null | grep -q "^2\."; then - COMPOSE_COMMAND="docker" - COMPOSE_ARGS="compose" -elif docker-compose version --short 2> /dev/null | grep -q "^2\."; then - COMPOSE_COMMAND="docker-compose" -fi - -if [ -z "$COMPOSE_COMMAND" ]; then - echo >&2 "ERROR: Cannot find Docker Compose compatible with V2 spec" - exit 1 -fi +. "$(dirname "$0")/utils" WORKDIR=$(dirname $(readlink_f $0))/../ cd $WORKDIR -set -o allexport - -env_override=${ENV_OVERRIDE:-./.env.override} -if [ -f "$env_override" ]; then - echo "INFO: Loading $env_override" - . "$env_override" -fi - -set +o allexport - -COMPOSE_FILE="docker-compose.yml" - -[ "$SHELLHUB_AUTO_SSL" = "true" ] && COMPOSE_FILE="${COMPOSE_FILE}:docker-compose.autossl.yml" -[ "$SHELLHUB_ENV" = "development" ] && COMPOSE_FILE="${COMPOSE_FILE}:docker-compose.dev.yml" -[ "$SHELLHUB_ENTERPRISE" = "true" ] && [ "$SHELLHUB_ENV" != "development" ] && COMPOSE_FILE="${COMPOSE_FILE}:docker-compose.enterprise.yml" -[ "$SHELLHUB_CONNECTOR" = "true" ] && [ "$SHELLHUB_ENV" = "development" ] && COMPOSE_FILE="${COMPOSE_FILE}:docker-compose.connector.dev.yml" -[ "$SHELLHUB_CONNECTOR" = "true" ] && [ "$SHELLHUB_ENV" != "development" ] && COMPOSE_FILE="${COMPOSE_FILE}:docker-compose.connector.yml" -[ -f docker-compose.override.yml ] && COMPOSE_FILE="${COMPOSE_FILE}:docker-compose.override.yml" - -[ -f "$EXTRA_COMPOSE_FILE" ] && COMPOSE_FILE="${COMPOSE_FILE}:${EXTRA_COMPOSE_FILE}" - -export COMPOSE_FILE - -exec $COMPOSE_COMMAND $COMPOSE_ARGS "$@" +exec docker compose "$@" diff --git a/bin/reset-user-password b/bin/reset-user-password deleted file mode 100755 index 0ce27c536ba..00000000000 --- a/bin/reset-user-password +++ /dev/null @@ 
-1,8 +0,0 @@ -#!/bin/sh - -[ -z $1 ] || [ -z $2 ] && echo "Usage: $0 " && exit 1 - -USERNAME=$1 -PASSWORD=$2 - -docker-compose exec cli ./cli reset-user-password $USERNAME $PASSWORD diff --git a/bin/setup b/bin/setup new file mode 100755 index 00000000000..4d979e636b1 --- /dev/null +++ b/bin/setup @@ -0,0 +1,28 @@ +#!/bin/sh + +. "$(dirname "$0")/utils" + +echo "🌟 Welcome to the ShellHub Setup Script" +echo "" + +exit_if_not_running + +if curl -s http://localhost/info | grep -q '"setup":true'; then + echo "✅ Setup has already been completed. No further action is required." + exit 0 +fi + +echo "📋 This script will generate a valid URL to set up your ShellHub instance." +echo "❗ Important: The ShellHub instance cannot be on localhost. Please ensure you provide a valid public IP address or hostname." +echo "" + +KEY="api_private_key" + +SIGNATURE=$(echo -n "shellhub" | openssl dgst -sha256 -sign "$KEY" | sha256sum | cut -d' ' -f1) +URL="http://localhost/setup?sign=$(printf '%s' "$SIGNATURE")" + +echo "🔗 Generated Setup URL:" +echo "$URL" +echo "" +echo "🚀 You can use this URL to complete the setup of your ShellHub instance." +echo "✅ Please ensure your ShellHub server is running and accessible from an external address." 
diff --git a/bin/utils b/bin/utils new file mode 100644 index 00000000000..6438c9c16ed --- /dev/null +++ b/bin/utils @@ -0,0 +1,52 @@ +#!/bin/sh + +readlink_f() { + (if uname | grep -q 'Darwin'; then + # Use greadlink if available, otherwise it behaves like "readlink -f" option + if command -v greadlink >/dev/null 2>&1; then + greadlink -f "$1" + else + target_file="$1" + cd "$(dirname "$target_file")" + target_file=$(basename "$target_file") + + while [ -L "$target_file" ] + do + target_file=$(readlink "$target_file") + cd "$(dirname "$target_file")" + target_file=$(basename "$target_file") + done + + echo "$(pwd -P)/$target_file" + fi + else + readlink -f "$1" + fi) +} + +exit_if_not_running() { + SERVICES=$(docker compose ps -q | grep -v INFO: | xargs docker inspect -f '{{.State.Running}}' 2>/dev/null | grep -c 'true') + + [ "$SERVICES" -le 0 ] && { echo "🚫 ERROR: ShellHub is not running. Exiting."; exit 1; } +} + +set -o allexport + +env_override=${ENV_OVERRIDE:-./.env.override} +if [ -f "$env_override" ]; then + echo "INFO: Loading $env_override" + . "$env_override" +fi + +set +o allexport + +COMPOSE_FILE="docker-compose.yml" + +[ "$SHELLHUB_AUTO_SSL" = "true" ] && COMPOSE_FILE="${COMPOSE_FILE}:docker-compose.autossl.yml" +[ "$SHELLHUB_ENV" = "development" ] && COMPOSE_FILE="${COMPOSE_FILE}:docker-compose.dev.yml:docker-compose.agent.yml" +[ "$SHELLHUB_ENTERPRISE" = "true" ] && [ "$SHELLHUB_ENV" != "development" ] && COMPOSE_FILE="${COMPOSE_FILE}:docker-compose.enterprise.yml" +[ -f docker-compose.override.yml ] && COMPOSE_FILE="${COMPOSE_FILE}:docker-compose.override.yml" + +[ -f "$EXTRA_COMPOSE_FILE" ] && COMPOSE_FILE="${COMPOSE_FILE}:${EXTRA_COMPOSE_FILE}" + +export COMPOSE_FILE diff --git a/cli/.air.toml b/cli/.air.toml new file mode 100644 index 00000000000..1b24ad97a00 --- /dev/null +++ b/cli/.air.toml @@ -0,0 +1,16 @@ +root = "../" + +[build] + cmd = "go build -o cli ." 
+ bin = "/bin/true" + delay = 1000 + exclude_regex = ["_test.go"] + exclude_unchanged = false + follow_symlink = false + include_dir = ["pkg", "cli"] + include_ext = ["go"] + kill_delay = "0s" + stop_on_error = true + +[misc] + clean_on_exit = true diff --git a/cli/Dockerfile b/cli/Dockerfile index d11cfaf6cca..25117b58299 100644 --- a/cli/Dockerfile +++ b/cli/Dockerfile @@ -1,5 +1,5 @@ # base stage -FROM golang:1.20.4-alpine3.16 AS base +FROM golang:1.24-alpine3.22 AS base RUN apk add --no-cache git ca-certificates @@ -30,25 +30,25 @@ WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub/cli RUN go build # development stage -FROM base AS development +FROM builder AS development RUN apk add --update openssl build-base docker-cli -RUN go install github.com/markbates/refresh@v1.11.1 && \ - go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.53.3 +RUN go install github.com/cosmtrek/air@v1.51.0 && \ + go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.1.6 WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub RUN go mod download -COPY ./cli/entrypoint-dev.sh /entrypoint.sh +COPY ./cli/entrypoint-dev.sh /entrypoint-dev.sh WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub/cli -ENTRYPOINT ["/entrypoint.sh"] +RUN go build + +ENTRYPOINT ["/entrypoint-dev.sh"] # production stage -FROM alpine:3.19.0 AS production +FROM alpine:3.23.2 AS production COPY --from=builder /go/src/github.com/shellhub-io/shellhub/cli/cli /cli - -ENTRYPOINT /cli diff --git a/cli/Dockerfile.test b/cli/Dockerfile.test new file mode 100644 index 00000000000..47da6b8d546 --- /dev/null +++ b/cli/Dockerfile.test @@ -0,0 +1,27 @@ +# base stage +FROM golang:1.24-alpine3.22 AS base + +RUN apk add --no-cache git ca-certificates + +WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub + +COPY ./go.mod ./ +COPY ./api/go.mod ./api/go.mod + +WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub/cli + +COPY ./cli/go.mod ./cli/go.sum ./ + +RUN go mod download + +COPY ./api 
$GOPATH/src/github.com/shellhub-io/shellhub/api +COPY ./pkg $GOPATH/src/github.com/shellhub-io/shellhub/pkg +COPY ./cli . + +WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub + +RUN go mod download + +WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub/cli + +RUN go build diff --git a/cli/README.md b/cli/README.md index 47500134ab1..0d8a1172c67 100644 --- a/cli/README.md +++ b/cli/README.md @@ -7,4 +7,4 @@ of the functions to add, delete and modify data on the database. This service running in background and is not supposed to be accessed directly unless you know what are you doing. -To access this service in the right way, we provide a help documentation that can be accessed here: [How to managing your data](https://docs.shellhub.io/admin-manual/managing/). +To access this service in the right way, we provide a help documentation that can be accessed here: [How to managing your data](https://docs.shellhub.io/self-hosted/administration). diff --git a/cli/cmd/deprecated.go b/cli/cmd/deprecated.go index 4d9c1d6f440..25b484f8fd6 100644 --- a/cli/cmd/deprecated.go +++ b/cli/cmd/deprecated.go @@ -3,6 +3,7 @@ package cmd import ( "github.com/shellhub-io/shellhub/cli/pkg/inputs" "github.com/shellhub-io/shellhub/cli/services" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" "github.com/spf13/cobra" ) @@ -104,7 +105,7 @@ func DeprecatedCommands(cmd *cobra.Command, service services.Services) { input := &inputs.MemberAdd{ Username: args[0], Namespace: args[1], - Role: args[2], + Role: authorizer.RoleFromString(args[2]), } ns, err := service.NamespaceAddMember(cmd.Context(), input) if err != nil { diff --git a/cli/cmd/namespace.go b/cli/cmd/namespace.go index 033eb83ef08..bd3ce5a83dc 100644 --- a/cli/cmd/namespace.go +++ b/cli/cmd/namespace.go @@ -24,15 +24,16 @@ func NamespaceCommands(service services.Services) *cobra.Command { } func namespaceCreate(service services.Services) *cobra.Command { - return &cobra.Command{ + cmdNamespace := &cobra.Command{ Use: "create 
[tenant]", Short: "Create a namespace", - Long: `Creates a new namespace in the system using the provided namespace name, associated owner's username, and an optional tenant ID. + Long: `Creates a new namespace in the system using the provided namespace name, associated owner's username, and an optional tenant ID and Type. The owner must be a valid username within the system. If a tenant ID is provided, it should be in UUID format.`, - Example: `cli namespace create dev john_doe`, - Args: cobra.RangeArgs(2, 3), + Example: `cli namespace create dev john_doe --type=team`, + Args: cobra.RangeArgs(2, 4), RunE: func(cmd *cobra.Command, args []string) error { // Avoid panic when TenantID isn't provided. + if len(args) == 2 { args = append(args, "") } @@ -43,6 +44,12 @@ The owner must be a valid username within the system. If a tenant ID is provided return err } + typeNamespace, err := cmd.Flags().GetString("type") + if err != nil { + return err + } + input.Type = typeNamespace + namespace, err := service.NamespaceCreate(cmd.Context(), &input) if err != nil { return err @@ -52,10 +59,15 @@ The owner must be a valid username within the system. 
If a tenant ID is provided cmd.Println("Namespace:", namespace.Name) cmd.Println("Tenant:", namespace.TenantID) cmd.Println("Owner:", namespace.Owner) + cmd.Println("Type:", namespace.Type) return nil }, } + + cmdNamespace.PersistentFlags().String("type", "team", "type") + + return cmdNamespace } func namespaceDelete(service services.Services) *cobra.Command { diff --git a/cli/cmd/user.go b/cli/cmd/user.go index e3c59546fd1..8b9c2504ab4 100644 --- a/cli/cmd/user.go +++ b/cli/cmd/user.go @@ -24,13 +24,16 @@ func UserCommands(service services.Services) *cobra.Command { } func userCreate(service services.Services) *cobra.Command { - return &cobra.Command{ + var admin bool + + cmd := &cobra.Command{ Use: "create ", Args: cobra.ExactArgs(3), Short: "Create a user.", Long: `Creates a new user in the system using the provided username, password, and email. The username must be unique, and the password should meet the system's security requirements.`, - Example: `cli user create john_doe Secret123!- john.doe@test.com`, + Example: `cli user create john_doe Secret123!- john.doe@test.com +cli user create john_doe Secret123!- john.doe@test.com --admin`, RunE: func(cmd *cobra.Command, args []string) error { var input inputs.UserCreate @@ -38,6 +41,8 @@ The username must be unique, and the password should meet the system's security return err } + input.Admin = admin + user, err := service.UserCreate(cmd.Context(), &input) if err != nil { return err @@ -50,6 +55,10 @@ The username must be unique, and the password should meet the system's security return nil }, } + + cmd.Flags().BoolVar(&admin, "admin", false, "Create user with admin privileges") + + return cmd } func userResetPassword(service services.Services) *cobra.Command { diff --git a/cli/entrypoint-dev.sh b/cli/entrypoint-dev.sh index 0cf2f674a36..07b9c30562d 100755 --- a/cli/entrypoint-dev.sh +++ b/cli/entrypoint-dev.sh @@ -1,5 +1,3 @@ #!/bin/sh -refresh run & - -sleep infinity +exec air diff --git a/cli/entrypoint.sh 
b/cli/entrypoint.sh deleted file mode 100755 index 67b6e4aa5cb..00000000000 --- a/cli/entrypoint.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/sh - -gomplate -f /etc/nginx/conf.d/shellhub.conf -o /etc/nginx/conf.d/shellhub.conf - -exec "$@" diff --git a/cli/go.mod b/cli/go.mod index ac383403f6c..e3cc45fb338 100644 --- a/cli/go.mod +++ b/cli/go.mod @@ -1,19 +1,22 @@ module github.com/shellhub-io/shellhub/cli -go 1.20 +go 1.24.9 require ( github.com/shellhub-io/shellhub v0.13.4 github.com/shellhub-io/shellhub/api v0.13.4 - github.com/sirupsen/logrus v1.9.3 - github.com/spf13/cobra v1.8.0 - github.com/stretchr/testify v1.8.4 - go.mongodb.org/mongo-driver v1.13.1 + github.com/sirupsen/logrus v1.9.4 + github.com/spf13/cobra v1.10.2 + github.com/stretchr/testify v1.11.1 ) require ( - github.com/andybalholm/brotli v1.0.5 // indirect - github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/andybalholm/brotli v1.1.0 // indirect + github.com/bodgit/plumbing v1.2.0 // indirect + github.com/bodgit/sevenzip v1.3.0 // indirect + github.com/bodgit/windows v1.0.0 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/connesc/cipherio v0.2.1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5 // indirect @@ -22,46 +25,50 @@ require ( github.com/go-playground/validator/v10 v10.11.2 // indirect github.com/go-redis/cache/v8 v8.4.4 // indirect github.com/go-redis/redis/v8 v8.11.5 // indirect - github.com/golang-jwt/jwt/v4 v4.5.0 // indirect - github.com/golang/snappy v0.0.4 // indirect - github.com/google/uuid v1.3.0 // indirect + github.com/go-resty/resty/v2 v2.7.0 // indirect + github.com/golang/snappy v1.0.0 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/hashicorp/errwrap v1.1.0 // indirect + github.com/hashicorp/go-multierror v1.1.1 // indirect github.com/inconshreveable/mousetrap v1.1.0 // 
indirect - github.com/klauspost/compress v1.16.0 // indirect + github.com/klauspost/compress v1.18.0 // indirect github.com/klauspost/pgzip v1.2.5 // indirect - github.com/labstack/echo/v4 v4.11.4 // indirect + github.com/labstack/echo/v4 v4.15.0 // indirect github.com/labstack/gommon v0.4.2 // indirect github.com/leodido/go-urn v1.2.2 // indirect - github.com/mattn/go-colorable v0.1.13 // indirect + github.com/mattn/go-colorable v0.1.14 // indirect github.com/mattn/go-isatty v0.0.20 // indirect - github.com/mholt/archiver/v3 v3.5.1 // indirect - github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe // indirect - github.com/nwaples/rardecode v1.1.3 // indirect + github.com/mholt/archiver/v4 v4.0.0-alpha.8 // indirect + github.com/montanaflynn/stats v0.7.1 // indirect + github.com/nwaples/rardecode/v2 v2.2.0 // indirect github.com/oschwald/geoip2-golang v1.8.0 // indirect github.com/oschwald/maxminddb-golang v1.10.0 // indirect github.com/pierrec/lz4/v4 v4.1.17 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/sethvargo/go-envconfig v0.9.0 // indirect - github.com/spf13/pflag v1.0.5 // indirect + github.com/spf13/pflag v1.0.9 // indirect github.com/square/mongo-lock v0.0.0-20230808145049-cfcf499f6bf0 // indirect - github.com/stretchr/objx v0.5.0 // indirect - github.com/ulikunitz/xz v0.5.11 // indirect + github.com/stretchr/objx v0.5.2 // indirect + github.com/therootcompany/xz v1.0.1 // indirect + github.com/ulikunitz/xz v0.5.14 // indirect github.com/valyala/bytebufferpool v1.0.0 // indirect github.com/valyala/fasttemplate v1.2.2 // indirect github.com/vmihailenco/go-tinylfu v0.2.2 // indirect github.com/vmihailenco/msgpack/v5 v5.3.5 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - github.com/xakep666/mongo-migrate v0.2.1 // indirect + github.com/xakep666/mongo-migrate v0.3.2 // indirect github.com/xdg-go/pbkdf2 v1.0.0 // indirect github.com/xdg-go/scram v1.1.2 // 
indirect github.com/xdg-go/stringprep v1.0.4 // indirect - github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 // indirect - github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d // indirect - golang.org/x/crypto v0.18.0 // indirect - golang.org/x/net v0.19.0 // indirect - golang.org/x/sync v0.1.0 // indirect - golang.org/x/sys v0.16.0 // indirect - golang.org/x/text v0.14.0 // indirect + github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect + go.mongodb.org/mongo-driver v1.17.7 // indirect + go4.org v0.0.0-20200411211856-f5505b9728dd // indirect + golang.org/x/crypto v0.47.0 // indirect + golang.org/x/net v0.48.0 // indirect + golang.org/x/sync v0.19.0 // indirect + golang.org/x/sys v0.40.0 // indirect + golang.org/x/text v0.33.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/cli/go.sum b/cli/go.sum index 1ff99ea8c96..a5325b3c005 100644 --- a/cli/go.sum +++ b/cli/go.sum @@ -1,24 +1,97 @@ -github.com/andybalholm/brotli v1.0.1/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y= -github.com/andybalholm/brotli v1.0.5 h1:8uQZIdzKmjc/iuPu7O2ioW48L81FgatrcpfFmiq/cCs= -github.com/andybalholm/brotli v1.0.5/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= 
+cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +dario.cat/mergo v1.0.2 h1:85+piFYR1tMbRrLcDwR18y4UKJ3aH1Tbzi24VRW1TK8= +dario.cat/mergo v1.0.2/go.mod h1:E/hbnu0NxMFBjpMIE34DRGLWqDy0g5FuKDhCb31ngxA= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25UVaW/CKtUDjefjrs0SPonmDGUVOYP0= +github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= +github.com/andybalholm/brotli v1.1.0 h1:eLKJA0d02Lf0mVpIDgYnqXcUn0GqVmEFny3VuID1U3M= +github.com/andybalholm/brotli v1.1.0/go.mod h1:sms7XGricyQI9K10gOSf56VKKWS4oLer58Q+mhRPtnY= +github.com/bodgit/plumbing v1.2.0 h1:gg4haxoKphLjml+tgnecR4yLBV5zo4HAZGCtAh3xCzM= +github.com/bodgit/plumbing v1.2.0/go.mod h1:b9TeRi7Hvc6Y05rjm8VML3+47n4XTZPtQ/5ghqic2n8= +github.com/bodgit/sevenzip v1.3.0 h1:1ljgELgtHqvgIp8W8kgeEGHIWP4ch3xGI8uOBZgLVKY= +github.com/bodgit/sevenzip 
v1.3.0/go.mod h1:omwNcgZTEooWM8gA/IJ2Nk/+ZQ94+GsytRzOJJ8FBlM= +github.com/bodgit/windows v1.0.0 h1:rLQ/XjsleZvx4fR1tB/UxQrK+SJ2OFHzfPjLWWOhDIA= +github.com/bodgit/windows v1.0.0/go.mod h1:a6JLwrB4KrTR5hBpp8FI9/9W9jJfeQ2h4XDXU74ZCdM= +github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= +github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= -github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/connesc/cipherio v0.2.1 h1:FGtpTPMbKNNWByNrr9aEBtaJtXjqOzkIXNYJp6OEycw= +github.com/connesc/cipherio v0.2.1/go.mod h1:ukY0MWJDFnJEbXMQtOcn2VmTpRfzcTz4OoVrWGGJZcA= +github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI= +github.com/containerd/errdefs v1.0.0/go.mod h1:+YBYIdtsnF4Iw6nWZhJcqGSg/dwvV7tyJ/kCkyJ2k+M= +github.com/containerd/errdefs/pkg v0.3.0 
h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151Xdx3ZPPE= +github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk= +github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= +github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= +github.com/containerd/platforms v0.2.1 h1:zvwtM3rz2YHPQsF2CHYM8+KtB5dvhISiXh5ZpSBQv6A= +github.com/containerd/platforms v0.2.1/go.mod h1:XHCb+2/hzowdiut9rkudds9bE5yJ7npe7dG/wG+uFPw= +github.com/cpuguy83/dockercfg v0.3.2 h1:DlJTyZGBDlXqUZ2Dk2Q3xHs/FtnooJJVaad2S9GKorA= +github.com/cpuguy83/dockercfg v0.3.2/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc= +github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= +github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= +github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= +github.com/docker/docker v28.5.1+incompatible h1:Bm8DchhSD2J6PsFzxC35TZo4TLGR2PdW/E69rU45NhM= +github.com/docker/docker v28.5.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/go-connections v0.6.0 h1:LlMG9azAe1TqfR7sO+NJttz1gy6KO7VJBh+pMmjSD94= +github.com/docker/go-connections v0.6.0/go.mod h1:AahvXYshr6JgfUJGdDCs2b5EZG/vmaMAntpSFH5BFKE= +github.com/docker/go-units v0.5.0 
h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= +github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5 h1:iFaUwBSo5Svw6L7HYpRu/0lE3e0BaElwnNO1qkNQxBY= github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5/go.mod h1:qssHWj60/X5sZFNxpG4HBPDHVqxNm4DfnCKgrbZOT+s= github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY= +github.com/ebitengine/purego v0.8.4 h1:CF7LEKg5FFOsASUj0+QwaXf8Ht6TlFxg09+S9wz0omw= +github.com/ebitengine/purego v0.8.4/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= -github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= +github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= +github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= 
+github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE= +github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78= github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= @@ -30,13 +103,27 @@ github.com/go-redis/cache/v8 v8.4.4/go.mod h1:JM6CkupsPvAu/LYEVGQy6UB4WDAzQSXkR0 github.com/go-redis/redis/v8 v8.11.3/go.mod h1:xNJ9xDG09FsIPwh3bWdk+0oDWHbtF9rPN0F/oD9XeKc= github.com/go-redis/redis/v8 v8.11.5 h1:AcZZR7igkdvfVmQTPnu9WE37LRrO/YrBH5zWyjDC0oI= github.com/go-redis/redis/v8 v8.11.5/go.mod h1:gREzHqY1hg6oD9ngVRbLStwAWKhA0FEgq8Jd4h5lpwo= +github.com/go-resty/resty/v2 v2.7.0 h1:me+K9p3uhSmXtrBZ4k9jcEAfJmuC8IivWHwaLZwPrFY= +github.com/go-resty/resty/v2 v2.7.0/go.mod h1:9PWDzw47qPphMRFfhsyk0NnSgvluHcljSMVIq3w7q0I= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= github.com/go-test/deep v1.0.1 h1:UQhStjbkDClarlmv0am7OXXO4/GaPdCGiUiMTvi28sg= github.com/go-test/deep v1.0.1/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= -github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg= -github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache 
v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= +github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= @@ -47,25 +134,46 @@ github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaS github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.2/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/golang/snappy v0.0.4 
h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= -github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v1.0.0 h1:Oy607GVXHs7RtbggtPBnr2RmDArIsAefDwvrdWvRhGs= +github.com/golang/snappy v1.0.0/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= -github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= -github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= 
+github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= +github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= -github.com/klauspost/compress v1.11.4/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= 
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= -github.com/klauspost/compress v1.16.0 h1:iULayQNOReoYUe+1qtKOqw9CwJv3aNQu8ivo7lw1HU4= -github.com/klauspost/compress v1.16.0/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= +github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= +github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= github.com/klauspost/pgzip v1.2.5 h1:qnWYvvKqedOF2ulHpMG72XQol4ILEJ8k2wwRl/Km8oE= github.com/klauspost/pgzip v1.2.5/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs= @@ -76,24 +184,43 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/labstack/echo/v4 v4.11.4 h1:vDZmA+qNeh1pd/cCkEicDMrjtrnMGQ1QFI9gWN1zGq8= -github.com/labstack/echo/v4 v4.11.4/go.mod h1:noh7EvLwqDsmh/X/HWKPUl1AjzJrhyptRyEbQJfxen8= +github.com/labstack/echo/v4 v4.15.0 h1:hoRTKWcnR5STXZFe9BmYun9AMTNeSbjHi2vtDuADJ24= +github.com/labstack/echo/v4 v4.15.0/go.mod h1:xmw1clThob0BSVRX1CRQkGQ/vjwcpOMjQZSZa9fKA/c= github.com/labstack/gommon v0.4.2 h1:F8qTUNXgG1+6WQmqoUWnz8WiEU60mXVVw0P4ht1WRA0= github.com/labstack/gommon v0.4.2/go.mod h1:QlUFxVM+SNXhDL/Z7YhocGIBYOiwB0mXm1+1bAPHPyU= github.com/leodido/go-urn v1.2.2 h1:7z68G0FCGvDk646jz1AelTYNYWrTNm0bEcFAo147wt4= github.com/leodido/go-urn v1.2.2/go.mod h1:kUaIbLZWttglzwNuG0pgsh5vuV6u2YcGBYz1hIPjtOQ= -github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= -github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= -github.com/mattn/go-isatty v0.0.16/go.mod 
h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/lufia/plan9stats v0.0.0-20240408141607-282e7b5d6b74 h1:1KuuSOy4ZNgW0KA2oYIngXVFhQcXxhLqCVK7cBcldkk= +github.com/lufia/plan9stats v0.0.0-20240408141607-282e7b5d6b74/go.mod h1:ilwx/Dta8jXAgpFYFvSWEMwxmbWXyiUHkd5FwyKhb5k= +github.com/magiconair/properties v1.8.10 h1:s31yESBquKXCV9a/ScB3ESkOjUYYv+X0rg8SYxI99mE= +github.com/magiconair/properties v1.8.10/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= +github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE= +github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= -github.com/mholt/archiver/v3 v3.5.1 h1:rDjOBX9JSF5BvoJGvjqK479aL70qh9DIpZCl+k7Clwo= -github.com/mholt/archiver/v3 v3.5.1/go.mod h1:e3dqJ7H78uzsRSEACH1joayhuSyhnonssnDhppzS1L4= -github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe h1:iruDEfMl2E6fbMZ9s0scYfZQ84/6SPL6zC8ACM2oIL0= +github.com/mholt/archiver/v4 v4.0.0-alpha.8 h1:tRGQuDVPh66WCOelqe6LIGh0gwmfwxUrSSDunscGsRM= +github.com/mholt/archiver/v4 v4.0.0-alpha.8/go.mod h1:5f7FUYGXdJWUjESffJaYR4R60VhnHxb2X3T1teMyv5A= +github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= +github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= +github.com/moby/go-archive v0.1.0 h1:Kk/5rdW/g+H8NHdJW2gsXyZ7UnzvJNOy6VKJqueWdcQ= +github.com/moby/go-archive v0.1.0/go.mod h1:G9B+YoujNohJmrIYFBpSd54GTUB4lt9S+xVQvsJyFuo= +github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk= +github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc= +github.com/moby/sys/sequential v0.6.0 h1:qrx7XFUd/5DxtqcoH1h438hF5TmOvzC/lspjy7zgvCU= +github.com/moby/sys/sequential v0.6.0/go.mod 
h1:uyv8EUTrca5PnDsdMGXhZe6CCe8U/UiTWd+lL+7b/Ko= +github.com/moby/sys/user v0.4.0 h1:jhcMKit7SA80hivmFJcbB1vqmw//wU61Zdui2eQXuMs= +github.com/moby/sys/user v0.4.0/go.mod h1:bG+tYYYJgaMtRKgEmuueC0hJEAZWwtIbZTB+85uoHjs= +github.com/moby/sys/userns v0.1.0 h1:tVLXkFOxVu9A64/yh59slHVv9ahO9UIev4JZusOLG/g= +github.com/moby/sys/userns v0.1.0/go.mod h1:IHUYgu/kao6N8YZlp9Cf444ySSvCmDlmzUcYfDHOl28= +github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= +github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= -github.com/nwaples/rardecode v1.1.0/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0= -github.com/nwaples/rardecode v1.1.3 h1:cWCaZwfM5H7nAD6PyEdcVnczzV8i/JtotnyW/dD9lEc= -github.com/nwaples/rardecode v1.1.3/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0= +github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE= +github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow= +github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= +github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/nwaples/rardecode/v2 v2.2.0 h1:4ufPGHiNe1rYJxYfehALLjup4Ls3ck42CWwjKiOqu0A= +github.com/nwaples/rardecode/v2 v2.2.0/go.mod h1:7uz379lSxPe6j9nvzxUZ+n7mnJNgjsRNb6IbvGVHRmw= github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= @@ -101,58 +228,82 @@ github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+W github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= 
github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= +github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= github.com/onsi/gomega v1.15.0/go.mod h1:cIuvLEne0aoVhAgh/O6ac0Op8WWw9H6eYCriF+tEHG0= github.com/onsi/gomega v1.18.1 h1:M1GfJqGRrBrrGGsbxzV5dqM2U2ApXefZCQpkukxYRLE= +github.com/onsi/gomega v1.18.1/go.mod h1:0q+aL8jAiMXy9hbwj2mr5GziHiwhAIQpFmmtT5hitRs= +github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= +github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040= +github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M= github.com/oschwald/geoip2-golang v1.8.0 h1:KfjYB8ojCEn/QLqsDU0AzrJ3R5Qa9vFlx3z6SLNcKTs= github.com/oschwald/geoip2-golang v1.8.0/go.mod h1:R7bRvYjOeaoenAp9sKRS8GX5bJWcZ0laWO5+DauEktw= github.com/oschwald/maxminddb-golang v1.10.0 h1:Xp1u0ZhqkSuopaKmk1WwHtjF0H9Hd9181uj2MQ5Vndg= github.com/oschwald/maxminddb-golang v1.10.0/go.mod h1:Y2ELenReaLAZ0b400URyGwvYxHV1dLIxBuyOsyYjHK0= -github.com/pierrec/lz4/v4 v4.1.2/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pierrec/lz4/v4 v4.1.17 h1:kV4Ip+/hUBC+8T6+2EgburRtkE9ef4nbY3f4dFhGjMc= github.com/pierrec/lz4/v4 v4.1.17/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= -github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod 
h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 h1:o4JXh1EVt9k/+g42oCprj/FisM4qX9L3sZB3upGN2ZU= +github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8= +github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd/go.mod h1:hPqNNc0+uJM6H+SuU8sEs5K5IQeKccPqeSjfgcKGgPk= github.com/rwtodd/Go.Sed v0.0.0-20210816025313-55464686f9ef/go.mod h1:8AEUvGVi2uQ5b24BIhcr0GCcpd/RNAFWaN2CJFrWIIQ= github.com/sethvargo/go-envconfig v0.9.0 h1:Q6FQ6hVEeTECULvkJZakq3dZMeBQ3JUpcKMfPQbKMDE= github.com/sethvargo/go-envconfig v0.9.0/go.mod h1:Iz1Gy1Sf3T64TQlJSvee81qDhf7YIlt8GMUX6yyNFs0= github.com/shellhub-io/mongotest v0.0.0-20230928124937-e33b07010742 h1:sIFW1zdZvMTAvpHYOphDoWSh4tiGloK0El2GZni4E+U= -github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= -github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= -github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0= -github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho= -github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= -github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/shellhub-io/mongotest v0.0.0-20230928124937-e33b07010742/go.mod 
h1:6J6yfW5oIvAZ6VjxmV9KyFZyPFVM3B4V3Epbb+1c0oo= +github.com/shirou/gopsutil/v4 v4.25.6 h1:kLysI2JsKorfaFPcYmcJqbzROzsBWEOAtw6A7dIfqXs= +github.com/shirou/gopsutil/v4 v4.25.6/go.mod h1:PfybzyydfZcN+JMMjkF6Zb8Mq1A/VcogFFg7hj50W9c= +github.com/sirupsen/logrus v1.9.4 h1:TsZE7l11zFCLZnZ+teH4Umoq5BhEIfIzfRDZ1Uzql2w= +github.com/sirupsen/logrus v1.9.4/go.mod h1:ftWc9WdOfJ0a92nsE2jF5u5ZwH8Bv2zdeOC42RjbV2g= +github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU= +github.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4= +github.com/spf13/pflag v1.0.9 h1:9exaQaMOCwffKiiiYk6/BndUBv+iRViNW+4lEMi0PvY= +github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/square/mongo-lock v0.0.0-20230808145049-cfcf499f6bf0 h1:wnVho7xObpxuF7Lr0146VZtfOLfbkXGcvzfFUw2LXuM= github.com/square/mongo-lock v0.0.0-20230808145049-cfcf499f6bf0/go.mod h1:bLPJcGVut+NBtZhrqY/jTnfluDrZeuIvf66VjuwU/eU= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= -github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= -github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod 
h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= -github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= -github.com/tidwall/pretty v0.0.0-20190325153808-1166b9ac2b65/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/testcontainers/testcontainers-go v0.40.0 h1:pSdJYLOVgLE8YdUY2FHQ1Fxu+aMnb6JfVz1mxk7OeMU= +github.com/testcontainers/testcontainers-go v0.40.0/go.mod h1:FSXV5KQtX2HAMlm7U3APNyLkkap35zNLxukw9oBi/MY= +github.com/testcontainers/testcontainers-go/modules/mongodb v0.40.0 h1:z/1qHeliTLDKNaJ7uOHOx1FjwghbcbYfga4dTFkF0hU= +github.com/testcontainers/testcontainers-go/modules/mongodb v0.40.0/go.mod h1:GaunAWwMXLtsMKG3xn2HYIBDbKddGArfcGsF2Aog81E= +github.com/therootcompany/xz v1.0.1 h1:CmOtsn1CbtmyYiusbfmhmkpAAETj0wBIH6kCYaX+xzw= +github.com/therootcompany/xz v1.0.1/go.mod h1:3K3UH1yCKgBneZYhuQUvJ9HPD19UEXEI0BWbMn8qNMY= github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= +github.com/tklauser/go-sysconf v0.3.13 h1:GBUpcahXSpR2xN01jhkNAbTLRk2Yzgggk8IM08lq3r4= +github.com/tklauser/go-sysconf v0.3.13/go.mod h1:zwleP4Q4OehZHGn4CYZDipCgg9usW5IJePewFCGVEa0= +github.com/tklauser/numcpus v0.7.0 h1:yjuerZP127QG9m5Zh/mSO4wqurYil27tHrqwRoRjpr4= +github.com/tklauser/numcpus v0.7.0/go.mod h1:bb6dMVcj8A42tSE7i32fsIUCbQNllK5iDguyOZRUzAY= github.com/tkuchiki/go-timezone v0.2.2 h1:MdHR65KwgVTwWFQrota4SKzc4L5EfuH5SdZZGtk/P2Q= +github.com/tkuchiki/go-timezone v0.2.2/go.mod h1:oFweWxYl35C/s7HMVZXiA19Jr9Y0qJHMaG/J2TES4LY= github.com/tkuchiki/parsetime v0.3.0 
h1:cvblFQlPeAPJL8g6MgIGCHnnmHSZvluuY+hexoZCNqc= +github.com/tkuchiki/parsetime v0.3.0/go.mod h1:OJkQmIrf5Ao7R+WYIdITPOfDVj8LmnHGCfQ8DTs3LCA= github.com/ulikunitz/xz v0.5.8/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= -github.com/ulikunitz/xz v0.5.9/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= -github.com/ulikunitz/xz v0.5.11 h1:kpFauv27b6ynzBNT/Xy+1k+fK4WswhN/6PN5WhFAGw8= -github.com/ulikunitz/xz v0.5.11/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= +github.com/ulikunitz/xz v0.5.14 h1:uv/0Bq533iFdnMHZdRBTOlaNMdb1+ZxXIlHDZHIHcvg= +github.com/ulikunitz/xz v0.5.14/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/valyala/fasttemplate v1.2.2 h1:lxLXG0uE3Qnshl9QyaK6XJxMXlQZELvChBOCmQD0Loo= @@ -164,8 +315,8 @@ github.com/vmihailenco/msgpack/v5 v5.3.5 h1:5gO0H1iULLWGhs2H5tbAHIZTV8/cYafcFOr9 github.com/vmihailenco/msgpack/v5 v5.3.5/go.mod h1:7xyJ9e+0+9SaZT0Wt1RGleJXzli6Q/V5KbhBonMG9jc= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -github.com/xakep666/mongo-migrate v0.2.1 h1:pRK966a44ujuGMEl73MOzv4MajcH8Q6MWo+TBlxjhvs= -github.com/xakep666/mongo-migrate v0.2.1/go.mod h1:pVQysP+es2wX4TaeVd7zLkRZhKMcBqcC/KRyLms6Eyk= +github.com/xakep666/mongo-migrate v0.3.2 h1:qmDtIGiMRIwMvc84fOlsDoP+08S6NWLJDPqa4wPfQ1U= +github.com/xakep666/mongo-migrate v0.3.2/go.mod h1:onPlsF/AvU9UZjlyX3PiC5iAPHYJuejPPPqlOvsCGhM= github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= github.com/xdg-go/scram v1.0.2/go.mod h1:1WAq6h33pAW+iRreB34OORO2Nf7qel3VV3fjBj+hCSs= @@ -174,57 +325,127 @@ github.com/xdg-go/scram v1.1.2/go.mod 
h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3k github.com/xdg-go/stringprep v1.0.2/go.mod h1:8F9zXuvzgwmyT5DUm4GUfZGDdT3W+LCvS6+da4O5kxM= github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8= github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM= -github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I= -github.com/xdg/stringprep v1.0.0/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= -github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 h1:nIPpBwaJSVYIxUFsDv3M8ofmx9yWTog9BfvIu0q41lo= -github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8/go.mod h1:HUYIGzjTL3rfEspMxjDjgmT5uz5wzYJKVo23qUhYTos= -github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d h1:splanxYIlg+5LfHAM6xpdFEAYOk8iySO56hMFq6uLyA= github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= +github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 h1:ilQV1hzziu+LLM3zUTJ0trRztfwgjqKnBWNtSRkbmwM= +github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78/go.mod h1:aL8wCCfTfSfmXjznFBSZNN13rSJjlIOI1fUNAtF7rmI= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -go.mongodb.org/mongo-driver v1.0.2/go.mod h1:u7ryQJ+DOzQmeO7zB6MHyr8jkEQvC8vH7qLUO4lqsUM= +github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0= +github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= go.mongodb.org/mongo-driver v1.9.1/go.mod h1:0sQWfOeY63QTntERDJJ/0SuKK0T1uVSgKCuAROlKEPY= -go.mongodb.org/mongo-driver v1.13.1 h1:YIc7HTYsKndGK4RFzJ3covLz1byri52x0IoMB0Pt/vk= -go.mongodb.org/mongo-driver v1.13.1/go.mod h1:wcDf1JBCXy2mOW0bWHwO/IOYqdca1MPCwDtFu/Z9+eo= +go.mongodb.org/mongo-driver v1.17.7 h1:a9w+U3Vt67eYzcfq3k/OAv284/uUUkL0uP75VE5rCOU= +go.mongodb.org/mongo-driver 
v1.17.7/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= +go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.51.0 h1:Xs2Ncz0gNihqu9iosIZ5SkBbWo5T8JhhLJFMQL1qmLI= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.51.0/go.mod h1:vy+2G/6NvVMpwGX/NyLqcC41fxepnuKHk16E6IZUcJc= +go.opentelemetry.io/otel v1.37.0 h1:9zhNfelUvx0KBfu/gb+ZgeAfAgtWrfHJZcAqFC228wQ= +go.opentelemetry.io/otel v1.37.0/go.mod h1:ehE/umFRLnuLa/vSccNq9oS1ErUlkkK71gMcN34UG8I= +go.opentelemetry.io/otel/metric v1.37.0 h1:mvwbQS5m0tbmqML4NqK+e3aDiO02vsf/WgbsdpcPoZE= +go.opentelemetry.io/otel/metric v1.37.0/go.mod h1:04wGrZurHYKOc+RKeye86GwKiTb9FKm1WHtO+4EVr2E= +go.opentelemetry.io/otel/trace v1.37.0 h1:HLdcFNbRQBE2imdSEgm/kwqmQj1Or1l/7bW6mxVK7z4= +go.opentelemetry.io/otel/trace v1.37.0/go.mod h1:TlgrlQ+PtQO5XFerSPUYG0JSgGyryXewPGyayAWSBS0= +go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= +go4.org v0.0.0-20200411211856-f5505b9728dd h1:BNJlw5kRTzdmyfh5U8F93HA2OwkP7ZGwA51eJ/0wKOU= +go4.org v0.0.0-20200411211856-f5505b9728dd/go.mod h1:CIiUVy99QCPfoE13bO4EZaz5GZMZXMSBGhxRdsvzbkg= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190513172903-22d7a77e9e5f/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto 
v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20201216223049-8b5274cf687f/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc= -golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= +golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8= +golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod 
h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod 
h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= -golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= 
+golang.org/x/net v0.0.0-20211029224645-99673261e6eb/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c= -golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U= +golang.org/x/net v0.48.0 h1:zyQRTTrjc33Lhh0fBgT/H3oZq9WuvRR5gPC70xpDiQU= +golang.org/x/net v0.48.0/go.mod h1:+ndRgGjkh8FGtu1w1FGbEC31if4VrNVMuKTgcAAnQRY= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201008141435-b3e1573b7520/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync 
v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o= -golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4= +golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -232,34 +453,90 @@ golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU= -golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ= +golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod 
h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= -golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= -golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= +golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= 
+golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190531172133-b3315ee88b7d/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools 
v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod 
h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod 
h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -272,11 +549,11 @@ gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8 gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= -gopkg.in/tomb.v2 v2.0.0-20161208151619-d5d1b5820637 h1:yiW+nvdHb9LVqSHQBXfZCieqV4fzYhNBql77zY0ykqs= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= @@ -285,3 
+562,11 @@ gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/cli/main.go b/cli/main.go index 39c2fea0963..075dbea77c2 100644 --- a/cli/main.go +++ b/cli/main.go @@ -6,14 +6,11 @@ import ( "github.com/shellhub-io/shellhub/api/store/mongo" "github.com/shellhub-io/shellhub/cli/cmd" "github.com/shellhub-io/shellhub/cli/services" - storecache "github.com/shellhub-io/shellhub/pkg/cache" + "github.com/shellhub-io/shellhub/pkg/cache" "github.com/shellhub-io/shellhub/pkg/envs" "github.com/shellhub-io/shellhub/pkg/loglevel" log "github.com/sirupsen/logrus" "github.com/spf13/cobra" - mgo "go.mongodb.org/mongo-driver/mongo" - "go.mongodb.org/mongo-driver/mongo/options" - "go.mongodb.org/mongo-driver/x/mongo/driver/connstring" ) type config struct { @@ -26,32 +23,37 @@ func init() { } func main() { + ctx := context.Background() + cfg, err := envs.ParseWithPrefix[config]("CLI_") if err != nil { log.Error(err.Error()) } - 
connStr, err := connstring.ParseAndValidate(cfg.MongoURI) - if err != nil { - log.WithError(err).Fatal("Invalid Mongo URI format") - } + log.Info("Connecting to Redis") - client, err := mgo.Connect(context.Background(), options.Client().ApplyURI(cfg.MongoURI)) + cache, err := cache.NewRedisCache(cfg.RedisURI, 0) if err != nil { - log.Error(err) + log.Fatal(err) } - cache, err := storecache.NewRedisCache(cfg.RedisURI) + log.Info("Connected to Redis") + + log.Trace("Connecting to MongoDB") + + store, err := mongo.NewStore(ctx, cfg.MongoURI, cache) if err != nil { - log.Fatal(err) + log. + WithError(err). + Fatal("failed to create the store") } - service := services.NewService(mongo.NewStore(client.Database(connStr.Database), cache)) + service := services.NewService(store) rootCmd := &cobra.Command{Use: "cli"} - rootCmd.AddCommand(cmd.UserCommands(service)) rootCmd.AddCommand(cmd.NamespaceCommands(service)) + // WARN: this is deprecated and will be removed soon cmd.DeprecatedCommands(rootCmd, service) if err := rootCmd.Execute(); err != nil { diff --git a/cli/pkg/inputs/member.go b/cli/pkg/inputs/member.go index 66d2da91487..d5d0986dbb2 100644 --- a/cli/pkg/inputs/member.go +++ b/cli/pkg/inputs/member.go @@ -1,10 +1,12 @@ package inputs +import "github.com/shellhub-io/shellhub/pkg/api/authorizer" + // MemberAdd is a struct for handling input when adding a member. type MemberAdd struct { Username string `validate:"required,username"` Namespace string - Role string + Role authorizer.Role } // MemberRemove is a struct for handling input when removing a member. 
diff --git a/cli/pkg/inputs/namespace.go b/cli/pkg/inputs/namespace.go index 4c80f825cc4..abf433507f7 100644 --- a/cli/pkg/inputs/namespace.go +++ b/cli/pkg/inputs/namespace.go @@ -5,6 +5,7 @@ type NamespaceCreate struct { Namespace string `validate:"required,hostname_rfc1123,excludes=.,lowercase"` Owner string `validate:"required,username"` TenantID string `validate:"omitempty,uuid"` + Type string `validate:"omitempty,lowercase,oneof=personal team"` } // NamespaceDelete defines the structure for inputs when deleting a namespace. diff --git a/cli/pkg/inputs/user.go b/cli/pkg/inputs/user.go index e1700543137..709a70606e9 100644 --- a/cli/pkg/inputs/user.go +++ b/cli/pkg/inputs/user.go @@ -5,6 +5,7 @@ type UserCreate struct { Username string `validate:"required,username"` Password string `validate:"required,password"` Email string `validate:"required,email"` + Admin bool } // UserUpdate defines the structure for inputs when updating a user. diff --git a/cli/refresh.yml b/cli/refresh.yml deleted file mode 100644 index 2ec9d63ac0d..00000000000 --- a/cli/refresh.yml +++ /dev/null @@ -1,14 +0,0 @@ -app_root: /go/src/github.com/shellhub-io/shellhub -ignored_folders: -- vendor -included_extensions: -- .go -build_target_path: "" -build_path: /go/src/github.com/shellhub-io/shellhub/cli -build_flags: [] -build_delay: 200ns -binary_name: cli -command_flags: [] -command_env: [] -enable_colors: true -log_name: "" diff --git a/cli/services/namespaces.go b/cli/services/namespaces.go index 5fe7228123e..0cb52871a45 100644 --- a/cli/services/namespaces.go +++ b/cli/services/namespaces.go @@ -2,11 +2,12 @@ package services import ( "context" + "strings" - "github.com/shellhub-io/shellhub/api/pkg/guard" + "github.com/shellhub-io/shellhub/api/store" "github.com/shellhub-io/shellhub/cli/pkg/inputs" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" "github.com/shellhub-io/shellhub/pkg/clock" - "github.com/shellhub-io/shellhub/pkg/envs" "github.com/shellhub-io/shellhub/pkg/models" 
"github.com/shellhub-io/shellhub/pkg/uuid" ) @@ -24,36 +25,42 @@ func (s *service) NamespaceCreate(ctx context.Context, input *inputs.NamespaceCr return nil, ErrNamespaceInvalid } - user, err := s.store.UserGetByUsername(ctx, input.Owner) + user, err := s.store.UserResolve(ctx, store.UserUsernameResolver, strings.ToLower(input.Owner)) if err != nil { return nil, ErrUserNotFound } ns := &models.Namespace{ - Name: input.Namespace, - Owner: user.ID, - TenantID: input.TenantID, - MaxDevices: func() int { - if envs.IsCloud() { - return MaxNumberDevicesLimited - } - - return MaxNumberDevicesUnlimited - }(), + Name: input.Namespace, + Owner: user.ID, + TenantID: input.TenantID, + MaxDevices: getMaxDevices(), + DevicesAcceptedCount: 0, + DevicesPendingCount: 0, + DevicesRejectedCount: 0, + DevicesRemovedCount: 0, Members: []models.Member{ { - ID: user.ID, - Role: guard.RoleOwner, + ID: user.ID, + Role: authorizer.RoleOwner, + AddedAt: clock.Now(), }, }, Settings: &models.NamespaceSettings{ - SessionRecord: true, + SessionRecord: true, + ConnectionAnnouncement: models.DefaultAnnouncementMessage, }, CreatedAt: clock.Now(), + Type: models.NewDefaultType(), } - ns, err = s.store.NamespaceCreate(ctx, ns) - if err != nil { + if models.IsTypeTeam(input.Type) { + ns.Type = models.TypeTeam + } else if models.IsTypePersonal(input.Type) { + ns.Type = models.TypePersonal + } + + if _, err = s.store.NamespaceCreate(ctx, ns); err != nil { return nil, ErrDuplicateNamespace } @@ -66,18 +73,21 @@ func (s *service) NamespaceAddMember(ctx context.Context, input *inputs.MemberAd return nil, ErrInvalidFormat } - user, err := s.store.UserGetByUsername(ctx, input.Username) + user, err := s.store.UserResolve(ctx, store.UserUsernameResolver, strings.ToLower(input.Username)) if err != nil { return nil, ErrUserNotFound } - ns, err := s.store.NamespaceGetByName(ctx, input.Namespace) + ns, err := s.store.NamespaceResolve(ctx, store.NamespaceNameResolver, strings.ToLower(input.Namespace)) if err != nil 
{ return nil, ErrNamespaceNotFound } - ns, err = s.store.NamespaceAddMember(ctx, ns.TenantID, user.ID, input.Role) - if err != nil { + if err = s.store.NamespaceCreateMembership(ctx, ns.TenantID, &models.Member{ + ID: user.ID, + Role: input.Role, + AddedAt: clock.Now(), + }); err != nil { return nil, ErrFailedNamespaceAddMember } @@ -90,18 +100,22 @@ func (s *service) NamespaceRemoveMember(ctx context.Context, input *inputs.Membe return nil, ErrInvalidFormat } - user, err := s.store.UserGetByUsername(ctx, input.Username) + user, err := s.store.UserResolve(ctx, store.UserUsernameResolver, strings.ToLower(input.Username)) if err != nil { return nil, ErrUserNotFound } - ns, err := s.store.NamespaceGetByName(ctx, input.Namespace) + ns, err := s.store.NamespaceResolve(ctx, store.NamespaceNameResolver, strings.ToLower(input.Namespace)) if err != nil { return nil, ErrNamespaceNotFound } - ns, err = s.store.NamespaceRemoveMember(ctx, ns.TenantID, user.ID) - if err != nil { + member, ok := ns.FindMember(user.ID) + if !ok { + return nil, ErrFailedNamespaceRemoveMember + } + + if err = s.store.NamespaceDeleteMembership(ctx, ns.TenantID, member); err != nil { return nil, ErrFailedNamespaceRemoveMember } @@ -114,12 +128,12 @@ func (s *service) NamespaceDelete(ctx context.Context, input *inputs.NamespaceDe return ErrNamespaceInvalid } - ns, err := s.store.NamespaceGetByName(ctx, input.Namespace) + ns, err := s.store.NamespaceResolve(ctx, store.NamespaceNameResolver, strings.ToLower(input.Namespace)) if err != nil { return ErrNamespaceNotFound } - if err := s.store.NamespaceDelete(ctx, ns.TenantID); err != nil { + if err := s.store.NamespaceDelete(ctx, ns); err != nil { return ErrFailedDeleteNamespace } diff --git a/cli/services/namespaces_test.go b/cli/services/namespaces_test.go index 4c6858c441d..e83dbaf72c0 100644 --- a/cli/services/namespaces_test.go +++ b/cli/services/namespaces_test.go @@ -6,10 +6,10 @@ import ( "testing" "time" - 
"github.com/shellhub-io/shellhub/api/pkg/guard" "github.com/shellhub-io/shellhub/api/store" "github.com/shellhub-io/shellhub/api/store/mocks" "github.com/shellhub-io/shellhub/cli/pkg/inputs" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" "github.com/shellhub-io/shellhub/pkg/clock" clockmock "github.com/shellhub-io/shellhub/pkg/clock/mocks" "github.com/shellhub-io/shellhub/pkg/envs" @@ -37,14 +37,16 @@ func TestNamespaceCreate(t *testing.T) { namespace string username string tenant string + typeNamespace string requiredMocks func() expected Expected }{ { - description: "fails when namespace is not valid", - namespace: "", - username: "john_doe", - tenant: "00000000-0000-4000-0000-000000000000", + description: "fails when namespace is not valid", + namespace: "", + username: "john_doe", + tenant: "00000000-0000-4000-0000-000000000000", + typeNamespace: "", requiredMocks: func() { envMock := &env_mocks.Backend{} envs.DefaultBackend = envMock @@ -52,10 +54,11 @@ func TestNamespaceCreate(t *testing.T) { expected: Expected{nil, ErrNamespaceInvalid}, }, { - description: "fails when namespace is not valid due name", - namespace: "invalid_namespace", - username: "john_doe", - tenant: "00000000-0000-4000-0000-000000000000", + description: "fails when namespace is not valid due name", + namespace: "invalid_namespace", + username: "john_doe", + tenant: "00000000-0000-4000-0000-000000000000", + typeNamespace: "", requiredMocks: func() { envMock := &env_mocks.Backend{} envs.DefaultBackend = envMock @@ -63,26 +66,28 @@ func TestNamespaceCreate(t *testing.T) { expected: Expected{nil, ErrNamespaceInvalid}, }, { - description: "fails when could not find a user", - namespace: "namespace", - username: "john_doe", - tenant: "00000000-0000-4000-0000-000000000000", + description: "fails when could not find a user", + namespace: "namespace", + username: "john_doe", + tenant: "00000000-0000-4000-0000-000000000000", + typeNamespace: "", requiredMocks: func() { envMock := 
&env_mocks.Backend{} envs.DefaultBackend = envMock - mock.On("UserGetByUsername", ctx, "john_doe").Return(nil, errors.New("error")).Once() + mock.On("UserResolve", ctx, store.UserUsernameResolver, "john_doe").Return(nil, errors.New("error")).Once() }, expected: Expected{nil, ErrUserNotFound}, }, { - description: "fails when namespace is duplicated", - namespace: "namespace", - username: "john_doe", - tenant: "00000000-0000-0000-0000-000000000000", + description: "fails when namespace is duplicated", + namespace: "namespace", + username: "john_doe", + tenant: "00000000-0000-0000-0000-000000000000", + typeNamespace: "", requiredMocks: func() { envMock := &env_mocks.Backend{} envs.DefaultBackend = envMock - envMock.On("Get", "SHELLHUB_CLOUD").Return("false").Once() + envMock.On("Get", "SHELLHUB_CLOUD").Return("false").Twice() envMock.On("Get", "SHELLHUB_ENTERPRISE").Return("false").Once() user := &models.User{ ID: "507f191e810c19729de860ea", @@ -92,31 +97,40 @@ func TestNamespaceCreate(t *testing.T) { Username: "john_doe", }, } - mock.On("UserGetByUsername", ctx, "john_doe").Return(user, nil).Once() + mock.On("UserResolve", ctx, store.UserUsernameResolver, "john_doe").Return(user, nil).Once() namespace := &models.Namespace{ Name: "namespace", Owner: "507f191e810c19729de860ea", TenantID: "00000000-0000-0000-0000-000000000000", - Members: []models.Member{{ID: "507f191e810c19729de860ea", Role: "owner"}}, + Type: models.TypeTeam, + Members: []models.Member{ + { + ID: "507f191e810c19729de860ea", + Role: "owner", + AddedAt: now, + }, + }, Settings: &models.NamespaceSettings{ - SessionRecord: true, + SessionRecord: true, + ConnectionAnnouncement: models.DefaultAnnouncementMessage, }, MaxDevices: MaxNumberDevicesUnlimited, CreatedAt: now, } - mock.On("NamespaceCreate", ctx, namespace).Return(nil, errors.New("error")).Once() + mock.On("NamespaceCreate", ctx, namespace).Return("", errors.New("error")).Once() }, expected: Expected{nil, ErrDuplicateNamespace}, }, { - description: 
"succeeds in creating a namespace when user and namespace data are valid - Community", - namespace: "namespace", - username: "john_doe", - tenant: "00000000-0000-0000-0000-000000000000", + description: "succeeds in creating a namespace when user and namespace data are valid - Community", + namespace: "namespace", + username: "john_doe", + tenant: "00000000-0000-0000-0000-000000000000", + typeNamespace: "", requiredMocks: func() { envMock := &env_mocks.Backend{} envs.DefaultBackend = envMock - envMock.On("Get", "SHELLHUB_CLOUD").Return("false").Once() + envMock.On("Get", "SHELLHUB_CLOUD").Return("false").Twice() envMock.On("Get", "SHELLHUB_ENTERPRISE").Return("false").Once() user := &models.User{ ID: "507f191e810c19729de860ea", @@ -126,42 +140,59 @@ func TestNamespaceCreate(t *testing.T) { Username: "john_doe", }, } - mock.On("UserGetByUsername", ctx, "john_doe").Return(user, nil).Once() + mock.On("UserResolve", ctx, store.UserUsernameResolver, "john_doe").Return(user, nil).Once() namespace := &models.Namespace{ Name: "namespace", Owner: "507f191e810c19729de860ea", TenantID: "00000000-0000-0000-0000-000000000000", - Members: []models.Member{{ID: "507f191e810c19729de860ea", Role: "owner"}}, + Type: models.TypeTeam, + Members: []models.Member{ + { + ID: "507f191e810c19729de860ea", + Role: "owner", + AddedAt: now, + }, + }, Settings: &models.NamespaceSettings{ - SessionRecord: true, + SessionRecord: true, + ConnectionAnnouncement: models.DefaultAnnouncementMessage, }, MaxDevices: MaxNumberDevicesUnlimited, CreatedAt: now, } - mock.On("NamespaceCreate", ctx, namespace).Return(namespace, nil).Once() + mock.On("NamespaceCreate", ctx, namespace).Return("00000000-0000-0000-0000-000000000000", nil).Once() }, expected: Expected{&models.Namespace{ Name: "namespace", Owner: "507f191e810c19729de860ea", TenantID: "00000000-0000-0000-0000-000000000000", - Members: []models.Member{{ID: "507f191e810c19729de860ea", Role: "owner"}}, + Type: models.TypeTeam, + Members: []models.Member{ 
+ { + ID: "507f191e810c19729de860ea", + Role: "owner", + AddedAt: now, + }, + }, Settings: &models.NamespaceSettings{ - SessionRecord: true, + SessionRecord: true, + ConnectionAnnouncement: models.DefaultAnnouncementMessage, }, MaxDevices: MaxNumberDevicesUnlimited, CreatedAt: now, }, nil}, }, { - description: "succeeds in creating a namespace when user and namespace data are valid - Cloud", - namespace: "namespace", - username: "john_doe", - tenant: "00000000-0000-0000-0000-000000000000", + description: "succeeds in creating a namespace when user and namespace data are valid - Cloud - type team", + namespace: "namespace", + username: "john_doe", + tenant: "00000000-0000-0000-0000-000000000000", + typeNamespace: "team", requiredMocks: func() { envMock := &env_mocks.Backend{} envs.DefaultBackend = envMock envMock.On("Get", "SHELLHUB_ENTERPRISE").Return("false").Once() - envMock.On("Get", "SHELLHUB_CLOUD").Return("true").Once() + envMock.On("Get", "SHELLHUB_CLOUD").Return("true").Twice() user := &models.User{ ID: "507f191e810c19729de860ea", UserData: models.UserData{ @@ -170,42 +201,120 @@ func TestNamespaceCreate(t *testing.T) { Username: "john_doe", }, } - mock.On("UserGetByUsername", ctx, "john_doe").Return(user, nil).Once() + mock.On("UserResolve", ctx, store.UserUsernameResolver, "john_doe").Return(user, nil).Once() namespace := &models.Namespace{ Name: "namespace", Owner: "507f191e810c19729de860ea", TenantID: "00000000-0000-0000-0000-000000000000", - Members: []models.Member{{ID: "507f191e810c19729de860ea", Role: "owner"}}, + Type: models.TypeTeam, + Members: []models.Member{ + { + ID: "507f191e810c19729de860ea", + Role: "owner", + AddedAt: now, + }, + }, Settings: &models.NamespaceSettings{ - SessionRecord: true, + SessionRecord: true, + ConnectionAnnouncement: models.DefaultAnnouncementMessage, }, MaxDevices: MaxNumberDevicesLimited, CreatedAt: now, } - mock.On("NamespaceCreate", ctx, namespace).Return(namespace, nil).Once() + mock.On("NamespaceCreate", ctx, 
namespace).Return("00000000-0000-0000-0000-000000000000", nil).Once() }, expected: Expected{&models.Namespace{ Name: "namespace", Owner: "507f191e810c19729de860ea", TenantID: "00000000-0000-0000-0000-000000000000", - Members: []models.Member{{ID: "507f191e810c19729de860ea", Role: "owner"}}, + Type: models.TypeTeam, + Members: []models.Member{ + { + ID: "507f191e810c19729de860ea", + Role: "owner", + AddedAt: now, + }, + }, Settings: &models.NamespaceSettings{ - SessionRecord: true, + SessionRecord: true, + ConnectionAnnouncement: models.DefaultAnnouncementMessage, }, MaxDevices: MaxNumberDevicesLimited, CreatedAt: now, }, nil}, }, { - description: "succeeds in creating a namespace when user and namespace data are valid - Enterprise", - namespace: "namespace", - username: "john_doe", - tenant: "00000000-0000-0000-0000-000000000000", + description: "succeeds in creating a namespace when user and namespace data are valid - Cloud", + namespace: "namespace", + username: "john_doe", + tenant: "00000000-0000-0000-0000-000000000000", + typeNamespace: "", + requiredMocks: func() { + envMock := &env_mocks.Backend{} + envs.DefaultBackend = envMock + envMock.On("Get", "SHELLHUB_ENTERPRISE").Return("false").Once() + envMock.On("Get", "SHELLHUB_CLOUD").Return("true").Twice() + user := &models.User{ + ID: "507f191e810c19729de860ea", + UserData: models.UserData{ + Name: "John Doe", + Email: "john.doe@test.com", + Username: "john_doe", + }, + } + mock.On("UserResolve", ctx, store.UserUsernameResolver, "john_doe").Return(user, nil).Once() + namespace := &models.Namespace{ + Name: "namespace", + Owner: "507f191e810c19729de860ea", + TenantID: "00000000-0000-0000-0000-000000000000", + Type: models.TypeTeam, + Members: []models.Member{ + { + ID: "507f191e810c19729de860ea", + Role: "owner", + AddedAt: now, + }, + }, + Settings: &models.NamespaceSettings{ + SessionRecord: true, + ConnectionAnnouncement: models.DefaultAnnouncementMessage, + }, + MaxDevices: MaxNumberDevicesLimited, + 
CreatedAt: now, + } + mock.On("NamespaceCreate", ctx, namespace).Return("00000000-0000-0000-0000-000000000000", nil).Once() + }, + expected: Expected{&models.Namespace{ + Name: "namespace", + Owner: "507f191e810c19729de860ea", + TenantID: "00000000-0000-0000-0000-000000000000", + Type: models.TypeTeam, + Members: []models.Member{ + { + ID: "507f191e810c19729de860ea", + Role: "owner", + AddedAt: now, + }, + }, + Settings: &models.NamespaceSettings{ + SessionRecord: true, + ConnectionAnnouncement: models.DefaultAnnouncementMessage, + }, + MaxDevices: MaxNumberDevicesLimited, + CreatedAt: now, + }, nil}, + }, + { + description: "succeeds in creating a namespace when user and namespace data are valid - Enterprise - type team", + namespace: "namespace", + username: "john_doe", + tenant: "00000000-0000-0000-0000-000000000000", + typeNamespace: "team", requiredMocks: func() { envMock := &env_mocks.Backend{} envs.DefaultBackend = envMock envMock.On("Get", "SHELLHUB_ENTERPRISE").Return("true").Once() - envMock.On("Get", "SHELLHUB_CLOUD").Return("false").Once() + envMock.On("Get", "SHELLHUB_CLOUD").Return("false").Twice() user := &models.User{ ID: "507f191e810c19729de860ea", UserData: models.UserData{ @@ -214,27 +323,104 @@ func TestNamespaceCreate(t *testing.T) { Username: "john_doe", }, } - mock.On("UserGetByUsername", ctx, "john_doe").Return(user, nil).Once() + mock.On("UserResolve", ctx, store.UserUsernameResolver, "john_doe").Return(user, nil).Once() namespace := &models.Namespace{ Name: "namespace", Owner: "507f191e810c19729de860ea", TenantID: "00000000-0000-0000-0000-000000000000", - Members: []models.Member{{ID: "507f191e810c19729de860ea", Role: "owner"}}, + Type: models.TypeTeam, + Members: []models.Member{ + { + ID: "507f191e810c19729de860ea", + Role: "owner", + AddedAt: now, + }, + }, Settings: &models.NamespaceSettings{ - SessionRecord: true, + SessionRecord: true, + ConnectionAnnouncement: models.DefaultAnnouncementMessage, }, MaxDevices: 
MaxNumberDevicesUnlimited, CreatedAt: now, } - mock.On("NamespaceCreate", ctx, namespace).Return(namespace, nil).Once() + mock.On("NamespaceCreate", ctx, namespace).Return("00000000-0000-0000-0000-000000000000", nil).Once() }, expected: Expected{&models.Namespace{ Name: "namespace", Owner: "507f191e810c19729de860ea", TenantID: "00000000-0000-0000-0000-000000000000", - Members: []models.Member{{ID: "507f191e810c19729de860ea", Role: "owner"}}, + Type: models.TypeTeam, + Members: []models.Member{ + { + ID: "507f191e810c19729de860ea", + Role: "owner", + AddedAt: now, + }, + }, Settings: &models.NamespaceSettings{ - SessionRecord: true, + SessionRecord: true, + ConnectionAnnouncement: models.DefaultAnnouncementMessage, + }, + MaxDevices: MaxNumberDevicesUnlimited, + CreatedAt: now, + }, nil}, + }, + { + description: "succeeds in creating a namespace when user and namespace data are valid - Enterprise", + namespace: "namespace", + username: "john_doe", + tenant: "00000000-0000-0000-0000-000000000000", + typeNamespace: "", + requiredMocks: func() { + envMock := &env_mocks.Backend{} + envs.DefaultBackend = envMock + envMock.On("Get", "SHELLHUB_ENTERPRISE").Return("true").Once() + envMock.On("Get", "SHELLHUB_CLOUD").Return("false").Twice() + user := &models.User{ + ID: "507f191e810c19729de860ea", + UserData: models.UserData{ + Name: "John Doe", + Email: "john.doe@test.com", + Username: "john_doe", + }, + } + mock.On("UserResolve", ctx, store.UserUsernameResolver, "john_doe").Return(user, nil).Once() + namespace := &models.Namespace{ + Name: "namespace", + Owner: "507f191e810c19729de860ea", + TenantID: "00000000-0000-0000-0000-000000000000", + Type: models.TypeTeam, + Members: []models.Member{ + { + ID: "507f191e810c19729de860ea", + Role: "owner", + AddedAt: now, + }, + }, + Settings: &models.NamespaceSettings{ + SessionRecord: true, + ConnectionAnnouncement: models.DefaultAnnouncementMessage, + }, + MaxDevices: MaxNumberDevicesUnlimited, + CreatedAt: now, + } + 
mock.On("NamespaceCreate", ctx, namespace).Return("00000000-0000-0000-0000-000000000000", nil).Once() + }, + expected: Expected{&models.Namespace{ + Name: "namespace", + Owner: "507f191e810c19729de860ea", + TenantID: "00000000-0000-0000-0000-000000000000", + Type: models.TypeTeam, + Members: []models.Member{ + { + ID: "507f191e810c19729de860ea", + Role: "owner", + AddedAt: now, + }, + }, + Settings: &models.NamespaceSettings{ + SessionRecord: true, + ConnectionAnnouncement: models.DefaultAnnouncementMessage, }, MaxDevices: MaxNumberDevicesUnlimited, CreatedAt: now, @@ -247,12 +433,12 @@ func TestNamespaceCreate(t *testing.T) { tc.requiredMocks() s := NewService(store.Store(mock)) - ns, err := s.NamespaceCreate(ctx, &inputs.NamespaceCreate{Namespace: tc.namespace, Owner: tc.username, TenantID: tc.tenant}) + ns, err := s.NamespaceCreate(ctx, &inputs.NamespaceCreate{Namespace: tc.namespace, Owner: tc.username, TenantID: tc.tenant, Type: tc.typeNamespace}) assert.Equal(t, tc.expected, Expected{ns, err}) + + mock.AssertExpectations(t) }) } - - mock.AssertExpectations(t) } func TestNamespaceAddMember(t *testing.T) { @@ -262,15 +448,18 @@ func TestNamespaceAddMember(t *testing.T) { } mock := new(mocks.Store) + mockClock := new(clockmock.Clock) + clock.DefaultBackend = mockClock ctx := context.TODO() now := time.Now() + mockClock.On("Now").Return(now) cases := []struct { description string username string namespace string - role string + role authorizer.Role requiredMocks func() expected Expected }{ @@ -278,9 +467,9 @@ func TestNamespaceAddMember(t *testing.T) { description: "fails when could not find a user", username: "john", namespace: "namespace", - role: guard.RoleObserver, + role: authorizer.RoleObserver, requiredMocks: func() { - mock.On("UserGetByUsername", ctx, "john").Return(nil, errors.New("error")).Once() + mock.On("UserResolve", ctx, store.UserUsernameResolver, "john").Return(nil, errors.New("error")).Once() }, expected: Expected{nil, ErrUserNotFound}, }, @@ 
-288,7 +477,7 @@ func TestNamespaceAddMember(t *testing.T) { description: "fails when could not find a namespace", username: "john", namespace: "invalid_namespace", - role: guard.RoleObserver, + role: authorizer.RoleObserver, requiredMocks: func() { user := &models.User{ ID: "507f191e810c19729de860ea", @@ -298,8 +487,8 @@ func TestNamespaceAddMember(t *testing.T) { Username: "john", }, } - mock.On("UserGetByUsername", ctx, "john").Return(user, nil).Once() - mock.On("NamespaceGetByName", ctx, "invalid_namespace").Return(nil, errors.New("error")).Once() + mock.On("UserResolve", ctx, store.UserUsernameResolver, "john").Return(user, nil).Once() + mock.On("NamespaceResolve", ctx, store.NamespaceNameResolver, "invalid_namespace").Return(nil, errors.New("error")).Once() }, expected: Expected{nil, ErrNamespaceNotFound}, }, @@ -307,7 +496,7 @@ func TestNamespaceAddMember(t *testing.T) { description: "successfully add user to the Namespace", username: "john", namespace: "namespace", - role: guard.RoleObserver, + role: authorizer.RoleObserver, requiredMocks: func() { user := &models.User{ ID: "507f191e810c19729de860ea", @@ -317,7 +506,7 @@ func TestNamespaceAddMember(t *testing.T) { Username: "john", }, } - mock.On("UserGetByUsername", ctx, "john").Return(user, nil).Once() + mock.On("UserResolve", ctx, store.UserUsernameResolver, "john").Return(user, nil).Once() namespace := &models.Namespace{ Name: "namespace", Owner: "507f191e810c19729de860ea", @@ -328,8 +517,12 @@ func TestNamespaceAddMember(t *testing.T) { }, CreatedAt: now, } - mock.On("NamespaceGetByName", ctx, "namespace").Return(namespace, nil).Once() - mock.On("NamespaceAddMember", ctx, "00000000-0000-0000-0000-000000000000", "507f191e810c19729de860ea", guard.RoleObserver).Return(namespace, nil).Once() + mock.On("NamespaceResolve", ctx, store.NamespaceNameResolver, "namespace").Return(namespace, nil).Once() + mock.On("NamespaceCreateMembership", ctx, "00000000-0000-0000-0000-000000000000", &models.Member{ + ID: 
"507f191e810c19729de860ea", + Role: authorizer.RoleObserver, + AddedAt: now, + }).Return(nil).Once() }, expected: Expected{&models.Namespace{ Name: "namespace", @@ -380,7 +573,7 @@ func TestNamespaceRemoveMember(t *testing.T) { username: "john_doe", namespace: "namespace", requiredMocks: func() { - mock.On("UserGetByUsername", ctx, "john_doe").Return(nil, errors.New("error")).Once() + mock.On("UserResolve", ctx, store.UserUsernameResolver, "john_doe").Return(nil, errors.New("error")).Once() }, expected: Expected{nil, ErrUserNotFound}, }, @@ -397,8 +590,8 @@ func TestNamespaceRemoveMember(t *testing.T) { Username: "john_doe", }, } - mock.On("UserGetByUsername", ctx, "john_doe").Return(user, nil).Once() - mock.On("NamespaceGetByName", ctx, "namespace").Return(nil, errors.New("error")).Once() + mock.On("UserResolve", ctx, store.UserUsernameResolver, "john_doe").Return(user, nil).Once() + mock.On("NamespaceResolve", ctx, store.NamespaceNameResolver, "namespace").Return(nil, errors.New("error")).Once() }, expected: Expected{nil, ErrNamespaceNotFound}, }, @@ -415,7 +608,7 @@ func TestNamespaceRemoveMember(t *testing.T) { Username: "john_doe", }, } - mock.On("UserGetByUsername", ctx, "john_doe").Return(user, nil).Once() + mock.On("UserResolve", ctx, store.UserUsernameResolver, "john_doe").Return(user, nil).Once() namespace := &models.Namespace{ Name: "namespace", Owner: "507f191e810c19729de860ea", @@ -426,8 +619,8 @@ func TestNamespaceRemoveMember(t *testing.T) { }, CreatedAt: now, } - mock.On("NamespaceGetByName", ctx, "namespace").Return(namespace, nil).Once() - mock.On("NamespaceRemoveMember", ctx, "00000000-0000-0000-0000-000000000000", "507f191e810c19729de860ea").Return(nil, errors.New("error")).Once() + mock.On("NamespaceResolve", ctx, store.NamespaceNameResolver, "namespace").Return(namespace, nil).Once() + mock.On("NamespaceDeleteMembership", ctx, "00000000-0000-0000-0000-000000000000", &models.Member{ID: "507f191e810c19729de860ea", Role: 
"owner"}).Return(errors.New("error")).Once() }, expected: Expected{nil, ErrFailedNamespaceRemoveMember}, }, @@ -444,7 +637,7 @@ func TestNamespaceRemoveMember(t *testing.T) { Username: "john_doe", }, } - mock.On("UserGetByUsername", ctx, "john_doe").Return(user, nil).Once() + mock.On("UserResolve", ctx, store.UserUsernameResolver, "john_doe").Return(user, nil).Once() namespace := &models.Namespace{ Name: "namespace", Owner: "507f191e810c19729de860ea", @@ -455,8 +648,8 @@ func TestNamespaceRemoveMember(t *testing.T) { }, CreatedAt: now, } - mock.On("NamespaceGetByName", ctx, "namespace").Return(namespace, nil).Once() - mock.On("NamespaceRemoveMember", ctx, "00000000-0000-0000-0000-000000000000", "507f191e810c19729de860ea").Return(namespace, nil).Once() + mock.On("NamespaceResolve", ctx, store.NamespaceNameResolver, "namespace").Return(namespace, nil).Once() + mock.On("NamespaceDeleteMembership", ctx, "00000000-0000-0000-0000-000000000000", &models.Member{ID: "507f191e810c19729de860ea", Role: "owner"}).Return(nil).Once() }, expected: Expected{&models.Namespace{ Name: "namespace", @@ -499,7 +692,7 @@ func TestNamespaceDelete(t *testing.T) { description: "fails when could not find a namespace", namespace: "namespace", requiredMocks: func() { - mock.On("NamespaceGetByName", ctx, "namespace").Return(nil, errors.New("error")).Once() + mock.On("NamespaceResolve", ctx, store.NamespaceNameResolver, "namespace").Return(nil, errors.New("error")).Once() }, expected: ErrNamespaceNotFound, }, @@ -518,8 +711,8 @@ func TestNamespaceDelete(t *testing.T) { MaxDevices: MaxNumberDevicesUnlimited, CreatedAt: clock.Now(), } - mock.On("NamespaceGetByName", ctx, "namespace").Return(namespace, nil).Once() - mock.On("NamespaceDelete", ctx, "00000000-0000-0000-0000-000000000000").Return(errors.New("error")).Once() + mock.On("NamespaceResolve", ctx, store.NamespaceNameResolver, "namespace").Return(namespace, nil).Once() + mock.On("NamespaceDelete", ctx, 
namespace).Return(errors.New("error")).Once() }, expected: ErrFailedDeleteNamespace, }, @@ -538,8 +731,8 @@ func TestNamespaceDelete(t *testing.T) { MaxDevices: MaxNumberDevicesUnlimited, CreatedAt: clock.Now(), } - mock.On("NamespaceGetByName", ctx, "namespace").Return(namespace, nil).Once() - mock.On("NamespaceDelete", ctx, "00000000-0000-0000-0000-000000000000").Return(nil).Once() + mock.On("NamespaceResolve", ctx, store.NamespaceNameResolver, "namespace").Return(namespace, nil).Once() + mock.On("NamespaceDelete", ctx, namespace).Return(nil).Once() }, expected: nil, }, diff --git a/cli/services/users.go b/cli/services/users.go index 7d3b09925b6..54b1a19969e 100644 --- a/cli/services/users.go +++ b/cli/services/users.go @@ -2,6 +2,8 @@ package services import ( "context" + "slices" + "strings" "github.com/shellhub-io/shellhub/api/store" "github.com/shellhub-io/shellhub/cli/pkg/inputs" @@ -12,56 +14,69 @@ import ( // UserCreate adds a new user based on the provided user's data. This method validates data and // checks for conflicts. func (s *service) UserCreate(ctx context.Context, input *inputs.UserCreate) (*models.User, error) { + // TODO: convert username and email to lower case. 
userData := models.UserData{ Name: input.Username, Email: input.Email, Username: input.Username, } + // TODO: validate this at cmd layer if ok, err := s.validator.Struct(userData); !ok || err != nil { return nil, ErrUserDataInvalid } - userPassword := models.NewUserPassword(input.Password) + if conflicts, has, _ := s.store.UserConflicts(ctx, &models.UserConflicts{Email: userData.Email, Username: userData.Username}); has { + containsEmail := slices.Contains(conflicts, "email") + containsUsername := slices.Contains(conflicts, "username") + + switch { + case containsUsername && containsEmail: + return nil, ErrUserNameAndEmailExists + case containsUsername: + return nil, ErrUserNameExists + case containsEmail: + return nil, ErrUserEmailExists + default: + return nil, ErrUserUnhandledDuplicate + } + } - if ok, err := s.validator.Struct(userPassword); !ok || err != nil { + password, err := models.HashUserPassword(input.Password) + if err != nil { + return nil, ErrUserPasswordInvalid + } + + // TODO: validate this at cmd layer + if ok, err := s.validator.Struct(password); !ok || err != nil { return nil, ErrUserPasswordInvalid } user := &models.User{ + Origin: models.UserOriginLocal, UserData: userData, - UserPassword: userPassword, - Confirmed: true, + Password: password, + Status: models.UserStatusConfirmed, CreatedAt: clock.Now(), MaxNamespaces: MaxNumberNamespacesCommunity, + Preferences: models.UserPreferences{ + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + }, + Admin: input.Admin, } - if err := s.store.UserCreate(ctx, user); err != nil { - // searches for conflicts in database - if err == store.ErrDuplicate { - var usernameExists, emailExists bool - if u, _ := s.store.UserGetByUsername(ctx, user.Username); u != nil { - usernameExists = true - } - if u, _ := s.store.UserGetByEmail(ctx, user.Email); u != nil { - emailExists = true - } - - switch { - case usernameExists && emailExists: - return nil, ErrUserNameAndEmailExists - case usernameExists: 
- return nil, ErrUserNameExists - case emailExists: - return nil, ErrUserEmailExists - default: - return nil, ErrUserUnhandledDuplicate - } - } - + if _, err := s.store.UserCreate(ctx, user); err != nil { return nil, ErrCreateNewUser } + system, err := s.store.SystemGet(ctx) + if err != nil { + system = &models.System{} + } + + system.Setup = true + _ = s.store.SystemSet(ctx, system) + return user, nil } @@ -71,31 +86,33 @@ func (s *service) UserDelete(ctx context.Context, input *inputs.UserDelete) erro return ErrUserDataInvalid } - user, err := s.store.UserGetByUsername(ctx, input.Username) + user, err := s.store.UserResolve(ctx, store.UserUsernameResolver, strings.ToLower(input.Username)) if err != nil { return ErrUserNotFound } - detach, err := s.store.UserDetachInfo(ctx, user.ID) + userInfo, err := s.store.UserGetInfo(ctx, user.ID) if err != nil { return ErrNamespaceNotFound } - // Delete all namespaces what the user is owner. - for _, ns := range detach["owner"] { - if err := s.store.NamespaceDelete(ctx, ns.TenantID); err != nil { - return err - } + ownedNamespaces := make([]string, len(userInfo.OwnedNamespaces)) + for i, namespace := range userInfo.OwnedNamespaces { + ownedNamespaces[i] = namespace.TenantID + } + + if _, err := s.store.NamespaceDeleteMany(ctx, ownedNamespaces); err != nil { + return err } - // Remove user from all namespaces what it is a member. 
- for _, ns := range detach["member"] { - if _, err := s.store.NamespaceRemoveMember(ctx, ns.TenantID, user.ID); err != nil { + for _, ns := range userInfo.AssociatedNamespaces { + member := &models.Member{ID: user.ID} + if err := s.store.NamespaceDeleteMembership(ctx, ns.TenantID, member); err != nil { return err } } - if err := s.store.UserDelete(ctx, user.ID); err != nil { + if err := s.store.UserDelete(ctx, user); err != nil { return ErrFailedDeleteUser } @@ -108,18 +125,24 @@ func (s *service) UserUpdate(ctx context.Context, input *inputs.UserUpdate) erro return ErrUserDataInvalid } - password := models.NewUserPassword(input.Password) + user, err := s.store.UserResolve(ctx, store.UserUsernameResolver, strings.ToLower(input.Username)) + if err != nil { + return ErrUserNotFound + } - if ok, err := s.validator.Struct(password); !ok || err != nil { + password, err := models.HashUserPassword(input.Password) + if err != nil { return ErrUserPasswordInvalid } - user, err := s.store.UserGetByUsername(ctx, input.Username) - if err != nil { - return ErrUserNotFound + // TODO: validate this at cmd layer + if ok, err := s.validator.Struct(password); !ok || err != nil { + return ErrUserPasswordInvalid } - if err := s.store.UserUpdatePassword(ctx, password.HashedPassword, user.ID); err != nil { + user.Password = password + + if err := s.store.UserUpdate(ctx, user); err != nil { return ErrFailedUpdateUser } diff --git a/cli/services/users_test.go b/cli/services/users_test.go index 51727842f68..24f32b87192 100644 --- a/cli/services/users_test.go +++ b/cli/services/users_test.go @@ -5,12 +5,14 @@ import ( "errors" "testing" - "github.com/shellhub-io/shellhub/api/pkg/guard" "github.com/shellhub-io/shellhub/api/store" "github.com/shellhub-io/shellhub/api/store/mocks" "github.com/shellhub-io/shellhub/cli/pkg/inputs" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" "github.com/shellhub-io/shellhub/pkg/clock" clockmock "github.com/shellhub-io/shellhub/pkg/clock/mocks" + 
"github.com/shellhub-io/shellhub/pkg/hash" + hashmock "github.com/shellhub-io/shellhub/pkg/hash/mocks" "github.com/shellhub-io/shellhub/pkg/models" "github.com/stretchr/testify/assert" ) @@ -22,6 +24,8 @@ func TestUserCreate(t *testing.T) { } mock := new(mocks.Store) + hashMock := &hashmock.Hasher{} + hash.Backend = hashMock ctx := context.TODO() now := clock.Now() @@ -41,7 +45,7 @@ func TestUserCreate(t *testing.T) { description: "fails when email is invalid", username: "john_doe", email: "invalidmail.com", - password: "password", + password: "secret", requiredMocks: func() { }, expected: Expected{nil, ErrUserDataInvalid}, @@ -50,51 +54,21 @@ func TestUserCreate(t *testing.T) { description: "fails when username is invalid", username: "", email: "john.doe@test.com", - password: "password", + password: "secret", requiredMocks: func() { }, expected: Expected{nil, ErrUserDataInvalid}, }, - { - description: "fails when the password is invalid", - username: "john_doe", - email: "john.doe@test.com", - password: "ab", - requiredMocks: func() { - }, - expected: Expected{nil, ErrUserPasswordInvalid}, - }, { description: "fails when email is duplicated", username: "john_doe", email: "john.doe@test.com", - password: "password", + password: "secret", requiredMocks: func() { - user := &models.User{ - UserData: models.UserData{ - Name: "john_doe", - Email: "john.doe@test.com", - Username: "john_doe", - }, - UserPassword: models.NewUserPassword("password"), - Confirmed: true, - CreatedAt: clock.Now(), - MaxNamespaces: MaxNumberNamespacesCommunity, - } - mock.On("UserCreate", ctx, user).Return(store.ErrDuplicate).Once() - currentUser := &models.User{ - UserData: models.UserData{ - Name: "jane_doe", - Email: "john.doe@test.com", - Username: "jane_doe", - }, - UserPassword: models.NewUserPassword("password"), - Confirmed: true, - CreatedAt: clock.Now(), - MaxNamespaces: MaxNumberNamespacesCommunity, - } - mock.On("UserGetByUsername", ctx, "john_doe").Return(nil, 
errors.New("error")).Once() - mock.On("UserGetByEmail", ctx, "john.doe@test.com").Return(currentUser, nil).Once() + mock. + On("UserConflicts", ctx, &models.UserConflicts{Username: "john_doe", Email: "john.doe@test.com"}). + Return([]string{"email"}, true, nil). + Once() }, expected: Expected{nil, ErrUserEmailExists}, }, @@ -102,33 +76,12 @@ func TestUserCreate(t *testing.T) { description: "fails when username is duplicated", username: "john_doe", email: "john.doe@test.com", - password: "password", + password: "secret", requiredMocks: func() { - user := &models.User{ - UserData: models.UserData{ - Name: "john_doe", - Email: "john.doe@test.com", - Username: "john_doe", - }, - UserPassword: models.NewUserPassword("password"), - Confirmed: true, - CreatedAt: clock.Now(), - MaxNamespaces: MaxNumberNamespacesCommunity, - } - mock.On("UserCreate", ctx, user).Return(store.ErrDuplicate).Once() - currentUser := &models.User{ - UserData: models.UserData{ - Name: "john_doe", - Email: "jane.doe@test.com", - Username: "john_doe", - }, - UserPassword: models.NewUserPassword("password"), - Confirmed: true, - CreatedAt: clock.Now(), - MaxNamespaces: MaxNumberNamespacesCommunity, - } - mock.On("UserGetByUsername", ctx, "john_doe").Return(currentUser, nil).Once() - mock.On("UserGetByEmail", ctx, "john.doe@test.com").Return(nil, errors.New("error")).Once() + mock. + On("UserConflicts", ctx, &models.UserConflicts{Username: "john_doe", Email: "john.doe@test.com"}). + Return([]string{"username"}, true, nil). 
+ Once() }, expected: Expected{nil, ErrUserNameExists}, }, @@ -136,77 +89,67 @@ func TestUserCreate(t *testing.T) { description: "fails when email and username is duplicated", username: "john_doe", email: "john.doe@test.com", - password: "password", + password: "secret", requiredMocks: func() { - user := &models.User{ - UserData: models.UserData{ - Name: "john_doe", - Email: "john.doe@test.com", - Username: "john_doe", - }, - UserPassword: models.NewUserPassword("password"), - Confirmed: true, - CreatedAt: clock.Now(), - MaxNamespaces: MaxNumberNamespacesCommunity, - } - mock.On("UserCreate", ctx, user).Return(store.ErrDuplicate).Once() - currentUser := &models.User{ - UserData: models.UserData{ - Name: "john_doe", - Email: "john.doe@test.com", - Username: "john_doe", - }, - UserPassword: models.NewUserPassword("password"), - Confirmed: true, - CreatedAt: clock.Now(), - MaxNamespaces: MaxNumberNamespacesCommunity, - } - mock.On("UserGetByUsername", ctx, "john_doe").Return(currentUser, nil).Once() - mock.On("UserGetByEmail", ctx, "john.doe@test.com").Return(currentUser, nil).Once() + mock. + On("UserConflicts", ctx, &models.UserConflicts{Username: "john_doe", Email: "john.doe@test.com"}). + Return([]string{"username", "email"}, true, nil). 
+ Once() }, expected: Expected{nil, ErrUserNameAndEmailExists}, }, { - description: "fails when some field is duplicated but unhandled", + description: "fails when the password is invalid", username: "john_doe", email: "john.doe@test.com", - password: "password", + password: "secret", requiredMocks: func() { - user := &models.User{ - UserData: models.UserData{ - Name: "john_doe", - Email: "john.doe@test.com", - Username: "john_doe", - }, - UserPassword: models.NewUserPassword("password"), - Confirmed: true, - CreatedAt: clock.Now(), - MaxNamespaces: MaxNumberNamespacesCommunity, - } - mock.On("UserCreate", ctx, user).Return(store.ErrDuplicate).Once() - mock.On("UserGetByUsername", ctx, "john_doe").Return(nil, nil).Once() - mock.On("UserGetByEmail", ctx, "john.doe@test.com").Return(nil, nil).Once() + mock. + On("UserConflicts", ctx, &models.UserConflicts{Username: "john_doe", Email: "john.doe@test.com"}). + Return([]string{}, false, nil). + Once() + hashMock. + On("Do", "secret"). + Return("", errors.New("error")). + Once() }, - expected: Expected{nil, ErrUserUnhandledDuplicate}, + expected: Expected{nil, ErrUserPasswordInvalid}, }, { description: "fails creates a user", username: "john_doe", email: "john.doe@test.com", - password: "password", + password: "secret", requiredMocks: func() { + mock. + On("UserConflicts", ctx, &models.UserConflicts{Username: "john_doe", Email: "john.doe@test.com"}). + Return([]string{}, false, nil). + Once() + hashMock. + On("Do", "secret"). + Return("$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YVVCIa2UYuFV4OJby7Yi", nil). 
+ Once() + user := &models.User{ + Origin: models.UserOriginLocal, UserData: models.UserData{ Name: "john_doe", Email: "john.doe@test.com", Username: "john_doe", }, - UserPassword: models.NewUserPassword("password"), - Confirmed: true, + Password: models.UserPassword{ + Plain: "secret", + Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YVVCIa2UYuFV4OJby7Yi", + }, + Status: models.UserStatusConfirmed, CreatedAt: clock.Now(), MaxNamespaces: MaxNumberNamespacesCommunity, + Preferences: models.UserPreferences{ + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + }, + Admin: false, } - mock.On("UserCreate", ctx, user).Return(errors.New("error")).Once() + mock.On("UserCreate", ctx, user).Return("", errors.New("error")).Once() }, expected: Expected{nil, ErrCreateNewUser}, }, @@ -214,31 +157,60 @@ func TestUserCreate(t *testing.T) { description: "successfully creates a user", username: "john_doe", email: "john.doe@test.com", - password: "password", + password: "secret", requiredMocks: func() { + mock. + On("UserConflicts", ctx, &models.UserConflicts{Username: "john_doe", Email: "john.doe@test.com"}). + Return([]string{}, false, nil). + Once() + hashMock. + On("Do", "secret"). + Return("$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YVVCIa2UYuFV4OJby7Yi", nil). 
+ Once() + user := &models.User{ + Origin: models.UserOriginLocal, UserData: models.UserData{ Name: "john_doe", Email: "john.doe@test.com", Username: "john_doe", }, - UserPassword: models.NewUserPassword("password"), - Confirmed: true, + Password: models.UserPassword{ + Plain: "secret", + Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YVVCIa2UYuFV4OJby7Yi", + }, + Status: models.UserStatusConfirmed, CreatedAt: clock.Now(), MaxNamespaces: MaxNumberNamespacesCommunity, + Preferences: models.UserPreferences{ + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + }, + Admin: false, } - mock.On("UserCreate", ctx, user).Return(nil).Once() + + mock.On("UserCreate", ctx, user).Return("000000000000000000000000", nil).Once() + + mock.On("SystemGet", ctx).Return(&models.System{Setup: false}, nil).Once() + mock.On("SystemSet", ctx, &models.System{Setup: true}).Return(nil).Once() }, expected: Expected{&models.User{ + Origin: models.UserOriginLocal, UserData: models.UserData{ Name: "john_doe", Email: "john.doe@test.com", Username: "john_doe", }, - UserPassword: models.NewUserPassword("password"), - Confirmed: true, + Password: models.UserPassword{ + Plain: "secret", + Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YVVCIa2UYuFV4OJby7Yi", + }, + Status: models.UserStatusConfirmed, CreatedAt: clock.Now(), MaxNamespaces: MaxNumberNamespacesCommunity, + Preferences: models.UserPreferences{ + AuthMethods: []models.UserAuthMethod{models.UserAuthMethodLocal}, + }, + Admin: false, }, nil}, }, } @@ -271,7 +243,7 @@ func TestUserDelete(t *testing.T) { description: "fails when could not find a user", username: "john_doe", requiredMocks: func() { - mock.On("UserGetByUsername", ctx, "john_doe").Return(nil, errors.New("error")).Once() + mock.On("UserResolve", ctx, store.UserUsernameResolver, "john_doe").Return(nil, errors.New("error")).Once() }, expected: ErrUserNotFound, }, @@ -287,8 +259,8 @@ func TestUserDelete(t *testing.T) { Username: "john_doe", }, } - 
mock.On("UserGetByUsername", ctx, "john_doe").Return(user, nil).Once() - mock.On("UserDetachInfo", ctx, "507f191e810c19729de860ea").Return(nil, errors.New("error")).Once() + mock.On("UserResolve", ctx, store.UserUsernameResolver, "john_doe").Return(user, nil).Once() + mock.On("UserGetInfo", ctx, "507f191e810c19729de860ea").Return(nil, errors.New("error")).Once() }, expected: ErrNamespaceNotFound, }, @@ -304,9 +276,9 @@ func TestUserDelete(t *testing.T) { Username: "john_doe", }, } - mock.On("UserGetByUsername", ctx, "john_doe").Return(user, nil).Once() + mock.On("UserResolve", ctx, store.UserUsernameResolver, "john_doe").Return(user, nil).Once() - namespaceOwned := []*models.Namespace{ + namespaceOwned := []models.Namespace{ { Name: "namespace1", Owner: "507f191e810c19729de860ea", @@ -328,14 +300,14 @@ func TestUserDelete(t *testing.T) { CreatedAt: clock.Now(), }, } - namespaceMember := []*models.Namespace{ + namespaceMember := []models.Namespace{ { Name: "namespace3", Owner: "507f191e810c19729de86000", TenantID: "30000000-0000-0000-0000-000000000000", Members: []models.Member{ - {ID: "507f191e810c19729de86000", Role: guard.RoleObserver}, - {ID: "507f191e810c19729de860ea", Role: guard.RoleObserver}, + {ID: "507f191e810c19729de86000", Role: authorizer.RoleObserver}, + {ID: "507f191e810c19729de860ea", Role: authorizer.RoleObserver}, }, Settings: &models.NamespaceSettings{ SessionRecord: true, @@ -347,8 +319,8 @@ func TestUserDelete(t *testing.T) { Owner: "507f191e810c19729de86000", TenantID: "tenantID1", Members: []models.Member{ - {ID: "507f191e810c19729de86000", Role: guard.RoleObserver}, - {ID: "507f191e810c19729de860ea", Role: guard.RoleObserver}, + {ID: "507f191e810c19729de86000", Role: authorizer.RoleObserver}, + {ID: "507f191e810c19729de860ea", Role: authorizer.RoleObserver}, }, Settings: &models.NamespaceSettings{ SessionRecord: true, @@ -357,19 +329,17 @@ func TestUserDelete(t *testing.T) { }, } - mock.On("UserDetachInfo", ctx, 
"507f191e810c19729de860ea").Return(map[string][]*models.Namespace{ - "owner": namespaceOwned, - "member": namespaceMember, + mock.On("UserGetInfo", ctx, "507f191e810c19729de860ea").Return(&models.UserInfo{ + OwnedNamespaces: namespaceOwned, + AssociatedNamespaces: namespaceMember, }, nil) - for _, v := range namespaceOwned { - mock.On("NamespaceDelete", ctx, v.TenantID).Return(nil).Once() - } + mock.On("NamespaceDeleteMany", ctx, []string{"10000000-0000-0000-0000-000000000000", "20000000-0000-0000-0000-000000000000"}).Return(int64(2), nil).Once() for _, v := range namespaceMember { - mock.On("NamespaceRemoveMember", ctx, v.TenantID, "507f191e810c19729de860ea").Return(nil, nil).Once() + mock.On("NamespaceDeleteMembership", ctx, v.TenantID, &models.Member{ID: "507f191e810c19729de860ea"}).Return(nil).Once() } - mock.On("UserDelete", ctx, "507f191e810c19729de860ea").Return(nil).Once() + mock.On("UserDelete", ctx, user).Return(nil).Once() }, expected: nil, }, @@ -390,6 +360,9 @@ func TestUserDelete(t *testing.T) { func TestUserResetPassword(t *testing.T) { mock := new(mocks.Store) + hashMock := &hashmock.Hasher{} + hash.Backend = hashMock + ctx := context.TODO() cases := []struct { @@ -399,60 +372,60 @@ func TestUserResetPassword(t *testing.T) { requiredMocks func() expected error }{ - { - description: "fails when the password is invalid", - username: "john_doe", - password: "ab", - requiredMocks: func() { - }, - expected: ErrUserPasswordInvalid, - }, { description: "fails when could not find a user", username: "john_doe", password: "password", requiredMocks: func() { - mock.On("UserGetByUsername", ctx, "john_doe").Return(nil, errors.New("error")).Once() + mock.On("UserResolve", ctx, store.UserUsernameResolver, "john_doe").Return(nil, errors.New("error")).Once() }, expected: ErrUserNotFound, }, { description: "fails to reset the user password", username: "john_doe", - password: "password", + password: "secret", requiredMocks: func() { - password := 
models.NewUserPassword("password") - user := &models.User{ - ID: "507f191e810c19729de860ea", - UserData: models.UserData{ - Name: "John Doe", - Email: "john.doe@test.com", - Username: "john_doe", - }, - UserPassword: password, - } - mock.On("UserGetByUsername", ctx, "john_doe").Return(user, nil).Once() - mock.On("UserUpdatePassword", ctx, password.HashedPassword, "507f191e810c19729de860ea").Return(errors.New("error")).Once() + user := &models.User{ID: "507f191e810c19729de860ea"} + + mock.On("UserResolve", ctx, store.UserUsernameResolver, "john_doe").Return(user, nil).Once() + + hashMock. + On("Do", "secret"). + Return("$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi", nil). + Once() + + expectedUser := *user + expectedUser.Password = models.UserPassword{Plain: "secret", Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi"} + + mock. + On("UserUpdate", ctx, &expectedUser). + Return(errors.New("error")). + Once() }, expected: ErrFailedUpdateUser, }, { description: "successfully reset the user password", username: "john_doe", - password: "password", + password: "secret", requiredMocks: func() { - password := models.NewUserPassword("password") - user := &models.User{ - ID: "507f191e810c19729de860ea", - UserData: models.UserData{ - Name: "John Doe", - Email: "john.doe@test.com", - Username: "john_doe", - }, - UserPassword: password, - } - mock.On("UserGetByUsername", ctx, "john_doe").Return(user, nil).Once() - mock.On("UserUpdatePassword", ctx, password.HashedPassword, "507f191e810c19729de860ea").Return(nil).Once() + user := &models.User{ID: "507f191e810c19729de860ea"} + + mock.On("UserResolve", ctx, store.UserUsernameResolver, "john_doe").Return(user, nil).Once() + + hashMock. + On("Do", "secret"). + Return("$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi", nil). 
+ Once() + + expectedUser := *user + expectedUser.Password = models.UserPassword{Plain: "secret", Hash: "$2a$10$V/6N1wsjheBVvWosPfv02uf4WAOb9lmp8YWQCIa2UYuFV4OJby7Yi"} + + mock. + On("UserUpdate", ctx, &expectedUser). + Return(nil). + Once() }, expected: nil, }, diff --git a/cli/services/utils.go b/cli/services/utils.go new file mode 100644 index 00000000000..da7f1f7f760 --- /dev/null +++ b/cli/services/utils.go @@ -0,0 +1,13 @@ +package services + +import "github.com/shellhub-io/shellhub/pkg/envs" + +// getMaxDevices get the limit of devices that a namespace can have if environment +// is cloud. +func getMaxDevices() int { + if envs.IsCloud() { + return MaxNumberDevicesLimited + } + + return MaxNumberDevicesUnlimited +} diff --git a/connector/Dockerfile b/connector/Dockerfile deleted file mode 100644 index 68c51221be2..00000000000 --- a/connector/Dockerfile +++ /dev/null @@ -1,72 +0,0 @@ -# base stage -FROM golang:1.20.4-alpine3.16 AS base - -ARG GOPROXY - -RUN apk add --update git ca-certificates build-base bash util-linux perl xz - -# We are using libxcrypt to support yescrypt password hashing method -# Since libxcrypt package is not available in Alpine, so we need to build libxcrypt from source code -RUN wget -q https://github.com/besser82/libxcrypt/releases/download/v4.4.27/libxcrypt-4.4.27.tar.xz && \ - tar xvf libxcrypt-4.4.27.tar.xz && cd libxcrypt-4.4.27 && \ - ./configure --prefix /usr && make -j$(nproc) && make install && \ - cd .. && rm -rf libxcrypt-4.4.27* - -RUN ln -sf /bin/bash /bin/sh - -WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub - -COPY ./go.mod ./ - -WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub/connector - -COPY ./connector/go.mod ./connector/go.sum ./ - -RUN go mod download - -# builder stage -FROM base AS builder - -ARG SHELLHUB_VERSION=latest -ARG GOPROXY - -COPY ./pkg $GOPATH/src/github.com/shellhub-io/shellhub/pkg -COPY ./connector . 
- -WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub - -RUN go mod download - -WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub/connector - -RUN go build -tags docker -ldflags "-X main.ConnectorVersion=${SHELLHUB_VERSION}" - -# development stage -FROM base AS development - -ARG GOPROXY -ENV GOPROXY ${GOPROXY} - -RUN apk add --update openssl openssh-client -RUN go install github.com/markbates/refresh@v1.11.1 && \ - go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.53.3 - -WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub - -RUN go mod download - -COPY ./connector/entrypoint-dev.sh /entrypoint.sh - -WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub/connector - -ENTRYPOINT ["/entrypoint.sh"] - -# production stage -FROM alpine:3.18.4 AS production - -COPY --from=0 /usr/lib/libcrypt.so* /usr/lib/ - -WORKDIR /app -COPY --from=builder /go/src/github.com/shellhub-io/shellhub/connector/connector /connector - -ENTRYPOINT /connector diff --git a/connector/entrypoint-dev.sh b/connector/entrypoint-dev.sh deleted file mode 100755 index 174fcb3ecdb..00000000000 --- a/connector/entrypoint-dev.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/sh - -refresh run diff --git a/connector/go.mod b/connector/go.mod deleted file mode 100644 index 1d87cccddfa..00000000000 --- a/connector/go.mod +++ /dev/null @@ -1,57 +0,0 @@ -module github.com/shellhub-io/shellhub/connector - -go 1.20 - -require ( - github.com/shellhub-io/shellhub v0.13.0-rc.6.0.20231026135513-f00f02afa3d1 - github.com/sirupsen/logrus v1.9.3 - github.com/spf13/cobra v1.7.0 -) - -require ( - github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 // indirect - github.com/GehirnInc/crypt v0.0.0-20230320061759-8cc1b52080c5 // indirect - github.com/Masterminds/semver v1.5.0 // indirect - github.com/Microsoft/go-winio v0.6.1 // indirect - github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be // indirect - github.com/creack/pty v1.1.18 // indirect - github.com/distribution/reference v0.5.0 // 
indirect - github.com/docker/distribution v2.8.3+incompatible // indirect - github.com/docker/docker v24.0.7+incompatible // indirect - github.com/docker/go-connections v0.4.0 // indirect - github.com/docker/go-units v0.5.0 // indirect - github.com/gabriel-vasile/mimetype v1.4.3 // indirect - github.com/gliderlabs/ssh v0.3.5 // indirect - github.com/go-playground/locales v0.14.1 // indirect - github.com/go-playground/universal-translator v0.18.1 // indirect - github.com/go-playground/validator/v10 v10.15.5 // indirect - github.com/go-resty/resty/v2 v2.11.0 // indirect - github.com/gogo/protobuf v1.3.2 // indirect - github.com/golang-jwt/jwt/v4 v4.5.0 // indirect - github.com/gorilla/websocket v1.5.0 // indirect - github.com/inconshreveable/mousetrap v1.1.0 // indirect - github.com/kr/fs v0.1.0 // indirect - github.com/labstack/echo/v4 v4.11.2 // indirect - github.com/labstack/gommon v0.4.0 // indirect - github.com/leodido/go-urn v1.2.4 // indirect - github.com/mattn/go-colorable v0.1.13 // indirect - github.com/mattn/go-isatty v0.0.19 // indirect - github.com/mattn/go-shellwords v1.0.12 // indirect - github.com/opencontainers/go-digest v1.0.0 // indirect - github.com/opencontainers/image-spec v1.0.2 // indirect - github.com/pkg/errors v0.9.1 // indirect - github.com/pkg/sftp v1.13.6 // indirect - github.com/sethvargo/go-envconfig v0.9.0 // indirect - github.com/spf13/pflag v1.0.5 // indirect - github.com/valyala/bytebufferpool v1.0.0 // indirect - github.com/valyala/fasttemplate v1.2.2 // indirect - golang.org/x/crypto v0.17.0 // indirect - golang.org/x/mod v0.13.0 // indirect - golang.org/x/net v0.17.0 // indirect - golang.org/x/sys v0.15.0 // indirect - golang.org/x/text v0.14.0 // indirect - golang.org/x/tools v0.14.0 // indirect - gotest.tools/v3 v3.5.1 // indirect -) - -replace github.com/shellhub-io/shellhub => ../ diff --git a/connector/go.sum b/connector/go.sum deleted file mode 100644 index f17ad5249cc..00000000000 --- a/connector/go.sum +++ /dev/null @@ 
-1,202 +0,0 @@ -github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25UVaW/CKtUDjefjrs0SPonmDGUVOYP0= -github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= -github.com/GehirnInc/crypt v0.0.0-20230320061759-8cc1b52080c5 h1:IEjq88XO4PuBDcvmjQJcQGg+w+UaafSy8G5Kcb5tBhI= -github.com/GehirnInc/crypt v0.0.0-20230320061759-8cc1b52080c5/go.mod h1:exZ0C/1emQJAw5tHOaUDyY1ycttqBAPcxuzf7QbY6ec= -github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww= -github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= -github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= -github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= -github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= -github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= -github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= -github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY= -github.com/creack/pty v1.1.18/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4= -github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/distribution/reference v0.5.0 h1:/FUIFXtfc/x2gpa5/VGfiGLuOIdYa1t65IKK2OFGvA0= -github.com/distribution/reference v0.5.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= -github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBirtxJnzDrHLEKxTAYk= -github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= 
-github.com/docker/docker v24.0.7+incompatible h1:Wo6l37AuwP3JaMnZa226lzVXGA3F9Ig1seQen0cKYlM= -github.com/docker/docker v24.0.7+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= -github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ= -github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= -github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= -github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= -github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0= -github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk= -github.com/gliderlabs/ssh v0.3.5 h1:OcaySEmAQJgyYcArR+gGGTHCyE7nvhEMTlYY+Dp8CpY= -github.com/gliderlabs/ssh v0.3.5/go.mod h1:8XB4KraRrX39qHhT6yxPsHedjA08I/uBVwj4xC+/+z4= -github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= -github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= -github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= -github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= -github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= -github.com/go-playground/validator/v10 v10.15.5 h1:LEBecTWb/1j5TNY1YYG2RcOUN3R7NLylN+x8TTueE24= -github.com/go-playground/validator/v10 v10.15.5/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= -github.com/go-resty/resty/v2 v2.11.0 h1:i7jMfNOJYMp69lq7qozJP+bjgzfAzeOhuGlyDrqxT/8= -github.com/go-resty/resty/v2 v2.11.0/go.mod h1:iiP/OpA0CkcL3IGt1O0+/SIItFUbkkyw5BGXiVdTu+A= -github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= -github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= -github.com/golang-jwt/jwt/v4 v4.5.0 
h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg= -github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= -github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= -github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= -github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= -github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= -github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= -github.com/jarcoal/httpmock v1.3.1 h1:iUx3whfZWVf3jT01hQTO/Eo5sAYtB2/rqaUuOtpInww= -github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= -github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/kr/fs v0.1.0 h1:Jskdu9ieNAYnjxsi0LbQp1ulIKZV1LAFgK1tWhpZgl8= -github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= -github.com/labstack/echo/v4 v4.11.2 h1:T+cTLQxWCDfqDEoydYm5kCobjmHwOwcv4OJAPHilmdE= -github.com/labstack/echo/v4 v4.11.2/go.mod h1:UcGuQ8V6ZNRmSweBIJkPvGfwCMIlFmiqrPqiEBfPYws= -github.com/labstack/gommon v0.4.0 h1:y7cvthEAEbU0yHOf4axH8ZG2NH8knB9iNSoTO8dyIk8= -github.com/labstack/gommon v0.4.0/go.mod h1:uW6kP17uPlLJsD3ijUYn3/M5bAxtlZhMI6m3MFxTMTM= -github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= -github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= -github.com/mattn/go-colorable v0.1.11/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= -github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= -github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= -github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= -github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= 
-github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= -github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= -github.com/mattn/go-shellwords v1.0.12 h1:M2zGm7EW6UQJvDeQxo4T51eKPurbeFbe8WtebGE2xrk= -github.com/mattn/go-shellwords v1.0.12/go.mod h1:EZzvwXDESEeg03EKmM+RmDnNOPKG4lLtQsUlTZDWQ8Y= -github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= -github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= -github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= -github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= -github.com/opencontainers/image-spec v1.0.2 h1:9yCKha/T5XdGtO0q9Q9a6T5NUCsTn/DrBg0D7ufOcFM= -github.com/opencontainers/image-spec v1.0.2/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= -github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= -github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/sftp v1.13.6 h1:JFZT4XbOU7l77xGSpOdW+pwIMqP044IyjXX6FGyEKFo= -github.com/pkg/sftp v1.13.6/go.mod h1:tz1ryNURKu77RL+GuCzmoJYxQczL3wLNNpPWagdg4Qk= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/sethvargo/go-envconfig v0.9.0 h1:Q6FQ6hVEeTECULvkJZakq3dZMeBQ3JUpcKMfPQbKMDE= -github.com/sethvargo/go-envconfig v0.9.0/go.mod h1:Iz1Gy1Sf3T64TQlJSvee81qDhf7YIlt8GMUX6yyNFs0= -github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= -github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= -github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I= -github.com/spf13/cobra v1.7.0/go.mod 
h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0= -github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= -github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= -github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= -github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= -github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= -github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= -github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= -github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= -github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= -github.com/valyala/fasttemplate v1.2.2 h1:lxLXG0uE3Qnshl9QyaK6XJxMXlQZELvChBOCmQD0Loo= -github.com/valyala/fasttemplate v1.2.2/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= -github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto 
v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20220826181053-bd7e27e6170d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw= -golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= -golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= -golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= -golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= -golang.org/x/mod v0.13.0 h1:I/DsJXRlw/8l/0c24sM9yb0T4z9liZTduXvdAWYiysY= -golang.org/x/mod v0.13.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.0.0-20220826154423-83b083e8dc8b/go.mod 
h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= -golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= -golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= -golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= -golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= -golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= -golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ= -golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211103235746-7861aae1554b/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220825204002-c680a09ffe64/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc= -golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.0.0-20220722155259-a9ba230a4035/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= -golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= -golang.org/x/term v0.13.0/go.mod 
h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= -golang.org/x/term v0.15.0 h1:y/Oo/a/q3IXu26lQgl04j/gjuBDOBlx7X6Om1j2CPW4= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= -golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= -golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= -golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4= -golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= -golang.org/x/tools v0.14.0 h1:jvNa2pY0M4r62jkRQ6RwEZZyPcymeL9XZMLBbV7U2nc= -golang.org/x/tools v0.14.0/go.mod h1:uYBEerGOWcJyEORxN+Ek8+TT266gXkNlHdJBwexUsBg= -golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors 
v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= -gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gotest.tools/v3 v3.5.1 h1:EENdUnS3pdur5nybKYIh2Vfgc8IUNBjxDPSjtiJcOzU= -gotest.tools/v3 v3.5.1/go.mod h1:isy3WKz7GK6uNw/sbHzfKBLvlvXwUyV06n6brMxxopU= diff --git a/connector/main.go b/connector/main.go deleted file mode 100644 index 0bf13ef5911..00000000000 --- a/connector/main.go +++ /dev/null @@ -1,87 +0,0 @@ -package main - -import ( - "path" - - "github.com/shellhub-io/shellhub/pkg/agent/connector" - "github.com/shellhub-io/shellhub/pkg/envs" - log "github.com/sirupsen/logrus" - "github.com/spf13/cobra" -) - -// Config provides the configuration for the agent connector service. -type Config struct { - // Set the ShellHub server address the agent will use to connect. - // This is required. - ServerAddress string `env:"SERVER_ADDRESS,required"` - - // Specify the path to store the devices/containers private keys. - // If not provided, the agent will generate a new one. - // This is required. - PrivateKeys string `env:"PRIVATE_KEYS,required"` - - // Sets the account tenant id used during communication to associate the - // devices to a specific tenant. - // This is required. 
- TenantID string `env:"TENANT_ID,required"` - - // Determine the interval to send the keep alive message to the server. This - // has a direct impact of the bandwidth used by the device when in idle - // state. Default is 30 seconds. - KeepAliveInterval int `env:"KEEPALIVE_INTERVAL,default=30"` -} - -// ConnectorVersion store the version to be embed inside the binary. This is -// injected using `-ldflags` build option (e.g: `go build -ldflags "-X -// main.ConnectorVersion=1.2.3"`). -var ConnectorVersion string - -func main() { - rootCmd := &cobra.Command{ // nolint: exhaustruct - Use: "docker", - Short: "Starts the Docker Connector", - Long: "Starts the Docker Connector, a service what turns all containers in a docker engine into a ShelHub device", - Run: func(cmd *cobra.Command, args []string) { - cfg, err := envs.ParseWithPrefix[Config]("SHELLHUB_") - if err != nil { - log.Fatal(err) - } - - cfg.PrivateKeys = path.Dir(cfg.PrivateKeys) - - log.WithFields(log.Fields{ - "address": cfg.ServerAddress, - "tenant_id": cfg.TenantID, - "private_keys": cfg.PrivateKeys, - "version": ConnectorVersion, - }).Info("Starting ShellHub Docker Connector") - - connector.ConnectorVersion = ConnectorVersion - connector, err := connector.NewDockerConnector(cfg.ServerAddress, cfg.TenantID, cfg.PrivateKeys) - if err != nil { - log.WithError(err).WithFields(log.Fields{ - "address": cfg.ServerAddress, - "tenant_id": cfg.TenantID, - "version": ConnectorVersion, - }).Fatal("Failed to create ShellHub Docker Connector") - } - - if err := connector.Listen(cmd.Context()); err != nil { - log.WithError(err).WithFields(log.Fields{ - "address": cfg.ServerAddress, - "tenant_id": cfg.TenantID, - "version": ConnectorVersion, - }).Fatal("Failed to listen for connections") - } - - log.WithFields(log.Fields{ - "address": cfg.ServerAddress, - "tenant_id": cfg.TenantID, - "version": ConnectorVersion, - }).Info("ShellHub Docker Connector stopped") - }, - } - - rootCmd.Version = ConnectorVersion - 
rootCmd.Execute() // nolint: errcheck -} diff --git a/connector/refresh.yml b/connector/refresh.yml deleted file mode 100644 index 012c9c160d3..00000000000 --- a/connector/refresh.yml +++ /dev/null @@ -1,14 +0,0 @@ -app_root: /go/src/github.com/shellhub-io/shellhub -ignored_folders: - - vendor -included_extensions: - - .go -build_target_path: "" -build_path: /go/src/github.com/shellhub-io/shellhub/connector -build_flags: ["-tags", "docker", "-ldflags", "-X main.ConnectorVersion=latest"] -build_delay: 200ns -binary_name: connector -command_flags: ["docker"] -command_env: [] -enable_colors: true -log_name: "" diff --git a/devscripts/README.md b/devscripts/README.md index b3030ee7d19..9473e188bcb 100644 --- a/devscripts/README.md +++ b/devscripts/README.md @@ -17,3 +17,5 @@ They are not intented for regular use by end users. * `lint-code`: Run code linter * `test-unit`: Run unit test * `gen-mock`: Generate/update mock objects for testing +* `run-agent`: Runs a native agent, building if necessary, with the provided tag. +* `update-go`: Updates the Go version of the project to . 
diff --git a/devscripts/add-device b/devscripts/add-device index 125ad3238b4..68268bb2415 100755 --- a/devscripts/add-device +++ b/devscripts/add-device @@ -10,7 +10,7 @@ MACADDR=$(echo -n 02; dd bs=1 count=5 if=/dev/random 2>/dev/null | hexdump -v -e PRIVATE_KEY_FILE=$(mktemp -u) PUBLIC_KEY_FILE=$(mktemp -u) -openssl genrsa -out $PRIVATE_KEY_FILE 2048 2> /dev/null +openssl genpkey -algorithm RSA -out $PRIVATE_KEY_FILE -pkeyopt rsa_keygen_bits:2048 2> /dev/null openssl rsa -in $PRIVATE_KEY_FILE -out $PUBLIC_KEY_FILE -pubout 2> /dev/null PUBLIC_KEY=$(cat $PUBLIC_KEY_FILE) @@ -24,7 +24,8 @@ JSON=$(cat < -Commands: - generate Run generation of all OpenAPI from instances -Options: - --help Display this help message -EOF - exit 1 -} - -generate() { - option=$1 - - case $option in - -h|--help) - echo "Usage: $0 generate" - echo "Generate a typescript from the OpenAPI spec" - return - ;; - esac - - ./bin/docker-compose \ - exec ui \ - openapi-generator-cli generate \ - --skip-validate-spec \ - -i http://openapi:8080/openapi.json \ - -g typescript-axios \ - -o /src/src/api/client -} - -main() { - command=$1 - case $command in - generate) - shift - $command $@ - exit $? - ;; - -h|--help) - usage - return - ;; - *) - echo "$command is not a valid command" - usage - exit 1 - esac -} - -main $@ diff --git a/devscripts/run-agent b/devscripts/run-agent new file mode 100755 index 00000000000..8caa5a75b91 --- /dev/null +++ b/devscripts/run-agent @@ -0,0 +1,100 @@ +#!/bin/sh + +set -e + +usage() { + cat <, -s , and -o "" parameters, which are used during agent initialization. If PRIVATE_KEY is not specified via -o, defaults to /tmp/shellhub-agent. Leave other parameters blank for default values. + +When is '.', the agent will be built in the current Git state and named 'latest'. + +Usage: + $0 [-t ] [-s ] [-o ] +Examples: + $0 . 
+ $0 v0.15.0 + $0 v0.15.0 -t 00000000-0000-4000-0000-000000000000 -s http://127.0.0.1 -o "KEEPALIVE_INTERVAL=30 PREFERRED_HOSTNAME=127.0.0.2" +EOF + exit 0 +} + +if [ "$#" -eq 0 ]; then + usage +fi + +. "$(dirname "$0")/utils" + +tag="$1" +shift + +tenant_id="00000000-0000-4000-0000-000000000000" +server_address="http://127.0.0.1" +private_key="/tmp/shellhub-agent" +additional_env="" + +# Processa as flags usando getopts +while [ "$#" -gt 0 ]; do + case "$1" in + -t) + tenant_id="$2" + shift 2 + ;; + -s) + server_address="$2" + shift 2 + ;; + -o) + additional_env="$2" + shift 2 + ;; + --help) + usage + ;; + *) + echo "Invalid option: $1" + usage + ;; + esac +done + +if [ "$tag" = "." ]; then + tag="latest" +fi + +if [ -z "$(echo "$additional_env" | grep PRIVATE_KEY)" ]; then + if [ -n "$private_key" ]; then + additional_env="PRIVATE_KEY=$private_key $additional_env" + fi +fi + +echo "Tenant ID: $tenant_id" +echo "Server Address: $server_address" +echo "Additional Environment Variables: $additional_env" + +if [ ! -d ./bin/agent ]; then + mkdir -p ./bin/agent +fi + +if [ "$tag" = "latest" ]; then + cd ./agent + go build -ldflags "-X main.AgentVersion=$tag" -o "./../bin/agent/$tag" + cd ./.. +elif [ ! -f ./bin/agent/$tag ]; then + check_bin "git" + + echo "No '$tag' agent found. Building one." + + branch=$(git symbolic-ref --short HEAD) + + ( + git checkout $tag + + cd ./agent + go build -ldflags "-X main.AgentVersion=${tag:1}" -o "./../bin/agent/$tag" # Remove the 'v' from the beginning of the tag. + cd ./.. + + git switch $branch + ) +fi + +sudo -E TENANT_ID=$tenant_id SERVER_ADDRESS=$server_address $additional_env ./bin/agent/$tag diff --git a/devscripts/update-go b/devscripts/update-go new file mode 100755 index 00000000000..5e3b2df1163 --- /dev/null +++ b/devscripts/update-go @@ -0,0 +1,42 @@ +#!/usr/bin/env sh + +set -e + +# Print the usage when none arguments are provided. +if [ "$#" -eq 0 ]; then + cat <. 
+ +Usage: + $0 +Examples: + $0 1.21 +EOF + exit 0 +fi + +. "$(dirname "$0")/utils" + +version=$1 + +directories=( + "" # Parent path + "api" + "cli" + "ssh" + "agent" + "tests" +) + +for dir in "${directories[@]}"; do + abs_path="$SHELLHUB_PATH/$dir" + echo "Processing directory: $abs_path" + cd "$abs_path" || { echo "Failed to enter directory $abs_path"; exit 1; } + + go mod edit -go=$version + go mod tidy + + echo "Completed processing directory: $abs_path" +done + +echo "Script execution completed." diff --git a/devscripts/utils b/devscripts/utils new file mode 100644 index 00000000000..d9b92e5d147 --- /dev/null +++ b/devscripts/utils @@ -0,0 +1,15 @@ +#!/bin/sh + +set -e + +check_bin() { + local cmd=$1 + + if ! command -v "$cmd" &> /dev/null + then + echo "$cmd could not be found" + exit 127 + fi +} + +SHELLHUB_PATH=$(dirname $(dirname "$(realpath "$0")")) diff --git a/docker-compose.agent.yml b/docker-compose.agent.yml new file mode 100644 index 00000000000..61e147db92d --- /dev/null +++ b/docker-compose.agent.yml @@ -0,0 +1,37 @@ +services: + agent: + image: agent + restart: unless-stopped + build: + context: . 
+ dockerfile: agent/Dockerfile + target: development + network: host + args: + - SHELLHUB_VERSION=latest + - GOPROXY=${SHELLHUB_GOPROXY:-} + - SHELLHUB_LOG_LEVEL=${SHELLHUB_LOG_LEVEL} + privileged: true + network_mode: host + pid: host + environment: + - SHELLHUB_SERVER_ADDRESS=http://localhost:${SHELLHUB_HTTP_PORT} + - SHELLHUB_PRIVATE_KEY=/go/src/github.com/shellhub-io/shellhub/agent/shellhub.key + - SHELLHUB_TENANT_ID=00000000-0000-4000-0000-000000000000 + - SHELLHUB_VERSION=${SHELLHUB_VERSION} + - SHELLHUB_LOG_LEVEL=${SHELLHUB_LOG_LEVEL} + - SHELLHUB_LOG_FORMAT=${SHELLHUB_LOG_FORMAT} + - SHELLHUB_PERMIT_EMPTY_PASSWORDS=${SHELLHUB_PERMIT_EMPTY_PASSWORDS} + - SHELLHUB_TRANSPORT_VERSION=2 + volumes: + - ./agent:/go/src/github.com/shellhub-io/shellhub/agent + - ./pkg:/go/src/github.com/shellhub-io/shellhub/pkg + - /:/host + - /dev:/dev + - /var/run/docker.sock:/var/run/docker.sock + - ./.golangci.yaml:/.golangci.yaml + - /tmp:/tmp + depends_on: + - api + - ssh + - gateway diff --git a/docker-compose.autossl.yml b/docker-compose.autossl.yml index e910c4ff9fb..8982b685385 100644 --- a/docker-compose.autossl.yml +++ b/docker-compose.autossl.yml @@ -1,5 +1,3 @@ -version: '3.7' - services: gateway: ports: diff --git a/docker-compose.connector.dev.yml b/docker-compose.connector.dev.yml deleted file mode 100644 index 68806f2e851..00000000000 --- a/docker-compose.connector.dev.yml +++ /dev/null @@ -1,29 +0,0 @@ -version: "3.7" - -services: - connector: - image: connector - restart: unless-stopped - build: - context: . 
- dockerfile: connector/Dockerfile - target: development - network: host - args: - - GOPROXY=${SHELLHUB_GOPROXY} - - SHELLHUB_LOG_LEVEL=${SHELLHUB_LOG_LEVEL} - network_mode: host - environment: - - SHELLHUB_SERVER_ADDRESS=http://${SHELLHUB_DOMAIN}/ - - SHELLHUB_PRIVATE_KEYS=/go/src/github.com/shellhub-io/shellhub/connector/.keys/ - - SHELLHUB_TENANT_ID=00000000-0000-4000-0000-000000000000 - - SHELLHUB_LOG_LEVEL=${SHELLHUB_LOG_LEVEL} - volumes: - - ./connector:/go/src/github.com/shellhub-io/shellhub/connector - - ./pkg:/go/src/github.com/shellhub-io/shellhub/pkg - - /var/run/docker.sock:/var/run/docker.sock - - ./.golangci.yaml:/.golangci.yaml - depends_on: - - api - - ssh - - gateway diff --git a/docker-compose.connector.yml b/docker-compose.connector.yml deleted file mode 100644 index ab2b53f652d..00000000000 --- a/docker-compose.connector.yml +++ /dev/null @@ -1,21 +0,0 @@ -version: "3.7" - -services: - connector: - image: shellhubio/connector:${SHELLHUB_VERSION} - restart: unless-stopped - network_mode: host - environment: - - SHELLHUB_SERVER_ADDRESS=http://${SHELLHUB_DOMAIN}/ - - SHELLHUB_PRIVATE_KEYS=/var/run/connector/keys - - SHELLHUB_TENANT_ID=${SHELLHUB_CONNECTOR_TENANT_ID} - - SHELLHUB_LOG_LEVEL=${SHELLHUB_LOG_LEVEL} - volumes: - - /var/run/docker.sock:/var/run/docker.sock - - "connector_keys:/var/run/connector/keys" - depends_on: - - api - - ssh - - gateway -volumes: - connector_keys: diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index 792aaefc544..63a0364a8a8 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -1,5 +1,3 @@ -version: "3.7" - services: ssh: image: ssh @@ -9,7 +7,7 @@ services: target: development network: host args: - - GOPROXY=${SHELLHUB_GOPROXY} + - GOPROXY=${SHELLHUB_GOPROXY:-} volumes: - ./ssh:/go/src/github.com/shellhub-io/shellhub/ssh - ./pkg:/go/src/github.com/shellhub-io/shellhub/pkg @@ -17,7 +15,6 @@ services: environment: - SHELLHUB_ENTERPRISE=${SHELLHUB_ENTERPRISE} - 
SHELLHUB_CLOUD=${SHELLHUB_CLOUD} - - SHELLHUB_BILLING=${SHELLHUB_BILLING} - SHELLHUB_ENV=${SHELLHUB_ENV} api: image: api @@ -27,13 +24,15 @@ services: target: development network: host args: - - GOPROXY=${SHELLHUB_GOPROXY} + - GOPROXY=${SHELLHUB_GOPROXY:-} volumes: - ./api:/go/src/github.com/shellhub-io/shellhub/api - ./pkg:/go/src/github.com/shellhub-io/shellhub/pkg + - ./openapi:/go/src/github.com/shellhub-io/shellhub/openapi - /var/run/docker.sock:/var/run/docker.sock - ./.golangci.yaml:/.golangci.yaml environment: + - TESTCONTAINERS_DOCKER_NETWORK=${SHELLHUB_NETWORK} - SHELLHUB_ENTERPRISE=${SHELLHUB_ENTERPRISE} - SHELLHUB_CLOUD=${SHELLHUB_CLOUD} - SHELLHUB_ENV=${SHELLHUB_ENV} @@ -45,7 +44,7 @@ services: target: development network: host args: - - NPM_CONFIG_REGISTRY=${SHELLHUB_NPM_REGISTRY} + - NPM_CONFIG_REGISTRY=${SHELLHUB_NPM_REGISTRY:-} volumes: - ./ui:/src gateway: @@ -55,52 +54,20 @@ services: dockerfile: gateway/Dockerfile target: development volumes: - - ./gateway/nginx.conf:/app/nginx.conf - - ./gateway/conf.d/:/app/conf.d/ + - ./gateway:/go/src/github.com/shellhub-io/shellhub/gateway + - ./pkg:/go/src/github.com/shellhub-io/shellhub/pkg + - ./.golangci.yaml:/.golangci.yaml + - ./gateway/nginx:/templates environment: - SHELLHUB_VERSION=latest - SHELLHUB_ENV=${SHELLHUB_ENV} - agent: - image: agent - restart: unless-stopped - build: - context: . 
- dockerfile: agent/Dockerfile - target: development - network: host - args: - - SHELLHUB_VERSION=latest - - GOPROXY=${SHELLHUB_GOPROXY} - - SHELLHUB_LOG_LEVEL=${SHELLHUB_LOG_LEVEL} - privileged: true - network_mode: host - pid: host - environment: - - SHELLHUB_SERVER_ADDRESS=http://localhost:${SHELLHUB_HTTP_PORT} - - SHELLHUB_PRIVATE_KEY=/go/src/github.com/shellhub-io/shellhub/agent/shellhub.key - - SHELLHUB_TENANT_ID=00000000-0000-4000-0000-000000000000 - - SHELLHUB_VERSION=${SHELLHUB_VERSION} - - SHELLHUB_LOG_LEVEL=${SHELLHUB_LOG_LEVEL} - volumes: - - ./agent:/go/src/github.com/shellhub-io/shellhub/agent - - ./pkg:/go/src/github.com/shellhub-io/shellhub/pkg - - /:/host - - /dev:/dev - - /etc/passwd:/etc/passwd - - /etc/group:/etc/group - - /var/run/docker.sock:/var/run/docker.sock - - ./.golangci.yaml:/.golangci.yaml - depends_on: - - api - - ssh - - gateway + - SHELLHUB_GATEWAY_ACCESS_LOGS=${SHELLHUB_GATEWAY_ACCESS_LOGS} cli: image: cli build: context: . dockerfile: cli/Dockerfile target: development - entrypoint: /entrypoint.sh environment: - SHELLHUB_ENTERPRISE=${SHELLHUB_ENTERPRISE} - SHELLHUB_CLOUD=${SHELLHUB_CLOUD} @@ -109,6 +76,20 @@ services: - ./pkg:/go/src/github.com/shellhub-io/shellhub/pkg - ./api:/go/src/github.com/shellhub-io/shellhub/api - ./.golangci.yaml:/.golangci.yaml - depends_on: - - api - - mongo + openapi: + image: openapi + build: + context: . 
+ dockerfile: openapi/Dockerfile + target: development + args: + - GOPROXY=${SHELLHUB_GOPROXY:-} + volumes: + - ./openapi:/go/src/github.com/shellhub-io/shellhub/openapi + - ./pkg:/go/src/github.com/shellhub-io/shellhub/pkg + - ./.golangci.yaml:/.golangci.yaml + environment: + - SHELLHUB_ENTERPRISE=${SHELLHUB_ENTERPRISE} + - SHELLHUB_CLOUD=${SHELLHUB_CLOUD} + networks: + - shellhub diff --git a/docker-compose.enterprise.yml b/docker-compose.enterprise.yml index c5f64a64971..11ef4c57fe0 100644 --- a/docker-compose.enterprise.yml +++ b/docker-compose.enterprise.yml @@ -1,5 +1,3 @@ -version: '3.7' - services: api: image: registry.infra.ossystems.io/cache/shellhubio/api:${SHELLHUB_VERSION} @@ -21,31 +19,44 @@ services: ssh: image: registry.infra.ossystems.io/cache/shellhubio/ssh:${SHELLHUB_VERSION} - environment: - - RECORD_URL=cloud-api:8080 - - BILLING_URL=billing-api:8080 - cloud-api: - image: registry.infra.ossystems.io/shellhub/cloud-api:${SHELLHUB_VERSION} - restart: unless-stopped - environment: - - EMAIL_API_KEY=${SHELLHUB_EMAIL_API_KEY} - - EMAIL_FROM_NAME=${SHELLHUB_EMAIL_FROM_NAME} - - EMAIL_FROM_ADDRESS=${SHELLHUB_EMAIL_FROM_ADDRESS} - networks: - - shellhub - admin-api: - image: registry.infra.ossystems.io/shellhub/admin-api:${SHELLHUB_VERSION} + + cloud: + image: registry.infra.ossystems.io/shellhub/cloud:${SHELLHUB_VERSION} restart: unless-stopped environment: + - DOMAIN=${SHELLHUB_DOMAIN} + - AUTO_SSL=${SHELLHUB_AUTO_SSL} + - EMAIL_API_KEY=${SHELLHUB_EMAIL_API_KEY-} + - EMAIL_FROM_NAME=${SHELLHUB_EMAIL_FROM_NAME-} + - EMAIL_FROM_ADDRESS=${SHELLHUB_EMAIL_FROM_ADDRESS-} + - SAML_SECRET=${SHELLHUB_SAML_SECRET} + - SHELLHUB_WEB_ENDPOINTS=${SHELLHUB_WEB_ENDPOINTS} + - SHELLHUB_WEB_ENDPOINTS_DOMAIN=${SHELLHUB_WEB_ENDPOINTS_DOMAIN} - ADMIN_API_USERNAME=${SHELLHUB_ENTERPRISE_ADMIN_USERNAME} - ADMIN_API_PASSWORD=${SHELLHUB_ENTERPRISE_ADMIN_PASSWORD} + - SHELLHUB_ENTERPRISE=${SHELLHUB_ENTERPRISE} + - 
SHELLHUB_OBJECT_STORAGE_ENDPOINT=${SHELLHUB_OBJECT_STORAGE_ENDPOINT} + - SHELLHUB_OBJECT_STORAGE_REGION=${SHELLHUB_OBJECT_STORAGE_REGION} + - SHELLHUB_OBJECT_STORAGE_BUCKET=${SHELLHUB_OBJECT_STORAGE_BUCKET} + - SHELLHUB_OBJECT_STORAGE_ACCESS_KEY=${SHELLHUB_OBJECT_STORAGE_ACCESS_KEY} + - SHELLHUB_OBJECT_STORAGE_SECRET_KEY=${SHELLHUB_OBJECT_STORAGE_SECRET_KEY} + - SHELLHUB_CLOUD=${SHELLHUB_CLOUD} + - SHELLHUB_BILLING=${SHELLHUB_BILLING} + - STRIPE_SECRET_KEY=${STRIPE_SECRET_KEY} + - STRIPE_PRICE_ID=${STRIPE_PRICE_ID} + - STRIPE_WEBHOOK_SECRET=${STRIPE_WEBHOOK_SECRET} secrets: - api_private_key - api_public_key networks: - shellhub - dashboard: - image: registry.infra.ossystems.io/shellhub/dashboard:${SHELLHUB_VERSION} + + minio: + image: minio/minio:RELEASE.2025-05-24T17-08-30Z restart: unless-stopped + command: server /data + environment: + - MINIO_ROOT_USER=${SHELLHUB_OBJECT_STORAGE_ACCESS_KEY} + - MINIO_ROOT_PASSWORD=${SHELLHUB_OBJECT_STORAGE_SECRET_KEY} networks: - shellhub diff --git a/docker-compose.test.yml b/docker-compose.test.yml new file mode 100644 index 00000000000..e2efe1fbd28 --- /dev/null +++ b/docker-compose.test.yml @@ -0,0 +1,42 @@ +version: "3.7" + +services: + ssh: + image: ssh:test + build: + context: . + dockerfile: ssh/Dockerfile + target: production + healthcheck: + interval: 5s + start_period: 10s + retries: 20 + ports: [] + api: + image: api:test + build: + context: . + dockerfile: api/Dockerfile + target: production + healthcheck: + interval: 5s + start_period: 10s + retries: 20 + ports: [] + gateway: + image: gateway:test + build: + context: . 
+ dockerfile: gateway/Dockerfile + target: production + healthcheck: + interval: 5s + start_period: 10s + retries: 20 + ports: [] + mongo: + healthcheck: + interval: 5s + start_period: 10s + retries: 20 + ports: [] diff --git a/docker-compose.yml b/docker-compose.yml index 082d77bbf5f..98b4b65eb9d 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,5 +1,3 @@ -version: "3.7" - services: ssh: image: shellhubio/ssh:${SHELLHUB_VERSION} @@ -9,10 +7,15 @@ services: - SHELLHUB_ENTERPRISE=${SHELLHUB_ENTERPRISE} - SHELLHUB_CLOUD=${SHELLHUB_CLOUD} - SHELLHUB_LOG_LEVEL=${SHELLHUB_LOG_LEVEL} - - SHELLHUB_BILLING=${SHELLHUB_BILLING} + - SHELLHUB_LOG_FORMAT=${SHELLHUB_LOG_FORMAT} - ALLOW_PUBLIC_KEY_ACCESS_BELLOW_0_6_0=${SHELLHUB_ALLOW_PUBLIC_KEY_ACCESS_BELLOW_0_6_0} - - RECORD_URL=${SHELLHUB_RECORD_URL} - - BILLING_URL=${SHELLHUB_BILLING_URL} + - SHELLHUB_WEB_ENDPOINTS=${SHELLHUB_WEB_ENDPOINTS} + - SHELLHUB_WEB_ENDPOINTS_DOMAIN=${SHELLHUB_WEB_ENDPOINTS_DOMAIN} + - SHELLHUB_INTERNAL_HTTP_CLIENT_RETRY_COUNT=${SHELLHUB_INTERNAL_HTTP_CLIENT_RETRY_COUNT} + - SHELLHUB_INTERNAL_HTTP_CLIENT_RETRY_WAIT_TIME=${SHELLHUB_INTERNAL_HTTP_CLIENT_RETRY_WAIT_TIME} + - SHELLHUB_INTERNAL_HTTP_CLIENT_RETRY_MAX_WAIT_TIME=${SHELLHUB_INTERNAL_HTTP_CLIENT_RETRY_MAX_WAIT_TIME} + - SHELLHUB_INTERNAL_HTTP_CLIENT_API_BASE_URL=${SHELLHUB_INTERNAL_HTTP_CLIENT_API_BASE_URL} + - SHELLHUB_INTERNAL_HTTP_CLIENT_ENTERPRISE_BASE_URL=${SHELLHUB_INTERNAL_HTTP_CLIENT_ENTERPRISE_BASE_URL} ports: - "${SHELLHUB_SSH_PORT}:2222" secrets: @@ -23,6 +26,8 @@ services: test: "curl -f http://ssh:8080/healthcheck || exit 1" interval: 30s start_period: 10s + depends_on: + - redis api: image: shellhubio/api:${SHELLHUB_VERSION} restart: unless-stopped @@ -31,26 +36,35 @@ services: - PRIVATE_KEY=/run/secrets/api_private_key - PUBLIC_KEY=/run/secrets/api_public_key - SHELLHUB_ENTERPRISE=${SHELLHUB_ENTERPRISE} - - SHELLHUB_BILLING=${SHELLHUB_BILLING} - SHELLHUB_CLOUD=${SHELLHUB_CLOUD} - - GEOIP=${SHELLHUB_GEOIP} + - 
MAXMIND_MIRROR=${SHELLHUB_MAXMIND_MIRROR} - MAXMIND_LICENSE=${SHELLHUB_MAXMIND_LICENSE} - - RECORD_RETENTION=${SHELLHUB_RECORD_RETENTION} - - TELEMETRY=${SHELLHUB_TELEMETRY} - - TELEMETRY_SCHEDULE=${SHELLHUB_TELEMETRY_SCHEDULE} - - SESSION_RECORD_CLEANUP_SCHEDULE=${SHELLHUB_SESSION_RECORD_CLEANUP_SCHEDULE} + - TELEMETRY=${SHELLHUB_TELEMETRY:-} + - TELEMETRY_SCHEDULE=${SHELLHUB_TELEMETRY_SCHEDULE:-} - SHELLHUB_LOG_LEVEL=${SHELLHUB_LOG_LEVEL} + - SHELLHUB_LOG_FORMAT=${SHELLHUB_LOG_FORMAT} - SENTRY_DSN=${SHELLHUB_SENTRY_DSN} - - SHELLLHUB_ANNOUNCEMENTS=${SHELLLHUB_ANNOUNCEMENTS} + - SHELLLHUB_ANNOUNCEMENTS=${SHELLLHUB_ANNOUNCEMENTS:-} - SHELLHUB_SSH_PORT=${SHELLHUB_SSH_PORT} - SHELLHUB_DOMAIN=${SHELLHUB_DOMAIN} - ASYNQ_GROUP_MAX_DELAY=${SHELLHUB_ASYNQ_GROUP_MAX_DELAY} - - ASYNQ_GROUP_GRACE_PERIOD=${SHELLHUB_ASNYQ_GROUP_GRACE_PERIOD} + - ASYNQ_GROUP_GRACE_PERIOD=${SHELLHUB_ASYNQ_GROUP_GRACE_PERIOD} - ASYNQ_GROUP_MAX_SIZE=${SHELLHUB_ASYNQ_GROUP_MAX_SIZE} + - ASYNQ_UNIQUENESS_TIMEOUT=${SHELLHUB_ASYNQ_UNIQUENESS_TIMEOUT} + - REDIS_CACHE_POOL_SIZE=${SHELLHUB_REDIS_CACHE_POOL_SIZE} + - MAXIMUM_ACCOUNT_LOCKOUT=${SHELLHUB_MAXIMUM_ACCOUNT_LOCKOUT} + - METRICS=${SHELLHUB_METRICS} + - SHELLHUB_INTERNAL_HTTP_CLIENT_RETRY_COUNT=${SHELLHUB_INTERNAL_HTTP_CLIENT_RETRY_COUNT} + - SHELLHUB_INTERNAL_HTTP_CLIENT_RETRY_WAIT_TIME=${SHELLHUB_INTERNAL_HTTP_CLIENT_RETRY_WAIT_TIME} + - SHELLHUB_INTERNAL_HTTP_CLIENT_RETRY_MAX_WAIT_TIME=${SHELLHUB_INTERNAL_HTTP_CLIENT_RETRY_MAX_WAIT_TIME} + - SHELLHUB_INTERNAL_HTTP_CLIENT_API_BASE_URL=${SHELLHUB_INTERNAL_HTTP_CLIENT_API_BASE_URL} + - SHELLHUB_INTERNAL_HTTP_CLIENT_ENTERPRISE_BASE_URL=${SHELLHUB_INTERNAL_HTTP_CLIENT_ENTERPRISE_BASE_URL} depends_on: - mongo + - redis links: - mongo + - redis secrets: - api_private_key - api_public_key @@ -65,46 +79,80 @@ services: restart: unless-stopped environment: - SHELLHUB_STRIPE_PUBLISHABLE_KEY=${STRIPE_PUBLISHABLE_KEY} - - SHELLHUB_BILLING=${SHELLHUB_BILLING} + - 
SHELLHUB_CHATWOOT_WEBSITE_TOKEN=${SHELLHUB_CHATWOOT_WEBSITE_TOKEN-} + - SHELLHUB_CHATWOOT_BASEURL=${SHELLHUB_CHATWOOT_BASEURL-} - SHELLHUB_ENTERPRISE=${SHELLHUB_ENTERPRISE} - SHELLHUB_CLOUD=${SHELLHUB_CLOUD} - SHELLHUB_ANNOUNCEMENTS=${SHELLHUB_ANNOUNCEMENTS} + - SHELLHUB_TINYMCE_API_KEY=${TINYMCE_API_KEY} - SHELLHUB_VERSION=${SHELLHUB_VERSION} - SHELLHUB_SENTRY_DSN=${SHELLHUB_SENTRY_DSN} + - SHELLHUB_PAYWALL=${SHELLHUB_PAYWALL} + - SHELLHUB_CONNECTOR=${SHELLHUB_CONNECTOR} + - SHELLHUB_WEB_ENDPOINTS=${SHELLHUB_WEB_ENDPOINTS} + - SHELLHUB_WEB_ENDPOINTS_DOMAIN=${SHELLHUB_WEB_ENDPOINTS_DOMAIN} + - SHELLHUB_ONBOARDING_URL=${SHELLHUB_ONBOARDING_URL} networks: - shellhub + healthcheck: + test: "curl -f http://ui:8080/healthcheck || exit 1" + interval: 30s + start_period: 10s gateway: image: shellhubio/gateway:${SHELLHUB_VERSION} restart: unless-stopped environment: + - SHELLHUB_LOG_LEVEL=${SHELLHUB_LOG_LEVEL} + - SHELLHUB_LOG_FORMAT=${SHELLHUB_LOG_FORMAT} - SHELLHUB_DOMAIN=${SHELLHUB_DOMAIN} - - SHELLHUB_PUBLIC_URL_DOMAIN=${SHELLHUB_PUBLIC_URL_DOMAIN} + - SHELLHUB_WEB_ENDPOINTS=${SHELLHUB_WEB_ENDPOINTS} + - SHELLHUB_WEB_ENDPOINTS_DOMAIN=${SHELLHUB_WEB_ENDPOINTS_DOMAIN} + - SHELLHUB_WEB_ENDPOINTS_DNS_PROVIDER=${SHELLHUB_WEB_ENDPOINTS_DNS_PROVIDER} + - SHELLHUB_WEB_ENDPOINTS_DNS_PROVIDER_TOKEN=${SHELLHUB_WEB_ENDPOINTS_DNS_PROVIDER_TOKEN} + - SHELLHUB_WEB_ENDPOINTS_ACME_DNS_URL=${SHELLHUB_WEB_ENDPOINTS_ACME_DNS_URL} + - SHELLHUB_WEB_ENDPOINTS_ACME_DNS_USERNAME=${SHELLHUB_WEB_ENDPOINTS_ACME_DNS_USERNAME} + - SHELLHUB_WEB_ENDPOINTS_ACME_DNS_PASSWORD=${SHELLHUB_WEB_ENDPOINTS_ACME_DNS_PASSWORD} + - SHELLHUB_WEB_ENDPOINTS_ACME_DNS_SUBDOMAIN=${SHELLHUB_WEB_ENDPOINTS_ACME_DNS_SUBDOMAIN} - SHELLHUB_VERSION=${SHELLHUB_VERSION} - SHELLHUB_SSH_PORT=${SHELLHUB_SSH_PORT} - SHELLHUB_PROXY=${SHELLHUB_PROXY} - SHELLHUB_ENTERPRISE=${SHELLHUB_ENTERPRISE} - SHELLHUB_CLOUD=${SHELLHUB_CLOUD} - SHELLHUB_AUTO_SSL=${SHELLHUB_AUTO_SSL} + - SHELLHUB_API_RATE_LIMIT=${SHELLHUB_API_RATE_LIMIT} + - 
SHELLHUB_API_RATE_LIMIT_ZONE_SIZE=${SHELLHUB_API_RATE_LIMIT_ZONE_SIZE} + - SHELLHUB_API_BURST_SIZE=${SHELLHUB_API_BURST_SIZE} + - SHELLHUB_API_BURST_DELAY=${SHELLHUB_API_BURST_DELAY} + - SHELLHUB_GATEWAY_ACCESS_LOGS=${SHELLHUB_GATEWAY_ACCESS_LOGS} depends_on: - api - ui ports: - - ${SHELLHUB_HTTP_PORT}:80 + - ${SHELLHUB_BIND_ADDRESS}:${SHELLHUB_HTTP_PORT}:80 networks: - shellhub + healthcheck: + test: > + sh -c ' + if [ "$SHELLHUB_PROXY" = "true" ]; then + curl -f -k -L --haproxy-protocol http://gateway/healthcheck || exit 1 + else + curl -f -k -L http://gateway/healthcheck || exit 1 + fi + ' + interval: 30s + start_period: 10s cli: image: shellhubio/cli:${SHELLHUB_VERSION} - entrypoint: /bin/sleep infinity - restart: unless-stopped + stop_signal: SIGKILL + command: /bin/sleep infinity environment: - SHELLHUB_LOG_LEVEL=${SHELLHUB_LOG_LEVEL} - depends_on: - - api - - mongo + - SHELLHUB_LOG_FORMAT=${SHELLHUB_LOG_FORMAT} networks: - shellhub mongo: - image: mongo:4.4.8 + image: mongo:4.4.29 restart: unless-stopped healthcheck: test: 'test $$(echo "rs.initiate({ _id: ''rs'', members: [ { _id: 0, host: ''mongo:27017'' } ] }).ok || rs.status().ok" | mongo --quiet) -eq 1' @@ -116,6 +164,7 @@ services: redis: image: redis restart: unless-stopped + command: ["redis-server", "--appendonly", "no", "--save", "\"\""] networks: - shellhub @@ -129,4 +178,4 @@ secrets: networks: shellhub: - name: shellhub_network + name: ${SHELLHUB_NETWORK} diff --git a/gateway/.air.toml b/gateway/.air.toml new file mode 100644 index 00000000000..c59fdac1dfc --- /dev/null +++ b/gateway/.air.toml @@ -0,0 +1,32 @@ +root = "../" +tmp_dir = "tmp" + +[build] +pre_cmd = [] +cmd = "go build -gcflags=\"all=-N -l\" -o ./tmp/main ." 
+post_cmd = [] +bin = "" +full_bin = "dlv exec ./tmp/main" +args_bin = [ + "--listen=0.0.0.0:2345", + "--headless", + "--continue", + "--accept-multiclient", +] +delay = 500 +exclude_dir = ["assets", "tmp", "vendor", "testdata"] +exclude_file = [] +exclude_regex = ["_test.go"] +exclude_unchanged = false +follow_symlink = false +include_dir = [] +include_ext = ["go", "tpl", "tmpl", "html"] +include_file = [] +kill_delay = "0s" +log = "build-errors.log" +poll = false +poll_interval = 0 +rerun = false +rerun_delay = 500 +send_interrupt = false +stop_on_error = false diff --git a/gateway/Dockerfile b/gateway/Dockerfile index 63542953af7..61c64ea2bbb 100644 --- a/gateway/Dockerfile +++ b/gateway/Dockerfile @@ -1,30 +1,76 @@ -FROM nginx:1.25.3-alpine AS base +# base stage +FROM golang:1.24-alpine3.22 AS base -RUN ["rm", "/etc/nginx/conf.d/default.conf"] +ARG GOPROXY -COPY --from=hairyhenderson/gomplate:v2.5.0-slim /gomplate /bin/gomplate +RUN apk add --no-cache git ca-certificates libgcc curl certbot certbot-nginx certbot-dns certbot-dns-digitalocean certbot-dns-cloudflare -RUN apk add inotify-tools -RUN apk add certbot certbot-nginx +WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub -RUN mkdir -p /etc/nginx/default.d +COPY ./go.mod ./ -RUN mkdir /app +WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub/gateway -VOLUME /etc/letsencrypt +COPY ./gateway/go.mod ./gateway/go.sum ./ -WORKDIR /app +RUN go mod download -COPY ./gateway/entrypoint.sh / +# builder stage +FROM base AS builder + +COPY ./pkg $GOPATH/src/github.com/shellhub-io/shellhub/pkg +COPY ./gateway . 
+ +WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub + +RUN go mod download + +WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub/gateway + +RUN go build + +# development stage +FROM base AS development + +COPY --from=openresty/openresty:1.25.3.1-5-alpine-apk /usr/local/openresty /usr/local/openresty + +RUN mkdir /etc/shellhub-gateway + +RUN mkdir -p /var/run/openresty /etc/letsencrypt && \ + curl -sSL https://ssl-config.mozilla.org/ffdhe2048.txt -o /etc/shellhub-gateway/dhparam.pem + +RUN apk add --update openssl build-base +RUN go install github.com/air-verse/air@v1.62 && \ + go install github.com/go-delve/delve/cmd/dlv@v1.25 && \ + go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.1.6 && \ + go install github.com/vektra/mockery/v2/...@v2.20.0 + +WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub + +RUN go mod download + +COPY ./gateway/entrypoint-dev.sh /entrypoint.sh + +WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub/gateway ENTRYPOINT ["/entrypoint.sh"] -CMD ["/usr/sbin/nginx", "-g", "daemon off;"] +# production stage +FROM alpine:3.23.2 AS production + +RUN apk add libgcc curl certbot certbot-nginx certbot-dns certbot-dns-digitalocean certbot-dns-cloudflare + +COPY --from=openresty/openresty:1.25.3.1-5-alpine-apk /usr/local/openresty /usr/local/openresty + +RUN mkdir /etc/shellhub-gateway + +RUN mkdir -p /var/run/openresty /etc/letsencrypt && \ + curl -sSL https://ssl-config.mozilla.org/ffdhe2048.txt -o /etc/shellhub-gateway/dhparam.pem -FROM base as development +COPY --from=builder /go/src/github.com/shellhub-io/shellhub/gateway/gateway /gateway -FROM base as production +RUN mkdir /templates -COPY ./gateway/nginx.conf /app/ +COPY ./gateway/nginx /templates -COPY ./gateway/conf.d /app/conf.d +ENTRYPOINT ["/gateway"] diff --git a/gateway/README.md b/gateway/README.md new file mode 100644 index 00000000000..13f1da33aec --- /dev/null +++ b/gateway/README.md @@ -0,0 +1,27 @@ +# Gateway + +The `gateway` is a crucial component in 
ShellHub, managing inbound traffic. It acts as a bridge between +end-users, device agents, and internal services, routing HTTP requests and WebSocket connections to the +appropriate services within the ShellHub Server. + +## Features + +- **NGINX Process Control**: Manages the lifecycle of the NGINX server process, including starting, stopping, +and dynamically reloading configurations. +- **Dynamic Configuration**: Generates NGINX configuration files using Go templates, dynamically incorporating +values from environment variables. +- **Certbot Management**: Manages SSL/TLS certificates through Certbot, including the generation, installation, +and renewal of certificates from Let's Encrypt. + +## Architecture + +The `gateway` is a Go application that utilizes NGINX/OpenResty as a reverse proxy and serves as the entry point +to a Docker image built from a minimal Alpine Linux base image. On top of this base image, OpenResty is installed +by copying it directly from the official OpenResty Docker image. + +## Configuration + +Configuration is managed via environment variables. The `gateway` processes NGINX template files found in +the `nginx` directory to create the final NGINX configuration files. These templates are written in Go template +and dynamically incorporate values from the `GatewayConfig` in `config.go`. In development mode, it watches for +changes in the `nginx` directory, allowing for dynamic updates to NGINX config without needing service restarts. diff --git a/gateway/certbot.go b/gateway/certbot.go new file mode 100644 index 00000000000..4b29924f019 --- /dev/null +++ b/gateway/certbot.go @@ -0,0 +1,607 @@ +// Package main provides SSL certificate management functionality using CertBot. +// It supports both HTTP-01 and DNS-01 challenge types for certificate generation +// and automatic renewal of SSL certificates. 
+package main + +import ( + "context" + "fmt" + "net" + "net/http" + "os" + "os/exec" + "path/filepath" + "time" + + "github.com/pkg/errors" + log "github.com/sirupsen/logrus" + "github.com/spf13/afero" +) + +// Executor provides an interface for executing system commands. +// This interface allows for easy mocking in tests and provides +// a clean abstraction over the exec package. +// +//go:generate mockery --name=Executor --filename=executor.go +type Executor interface { + // Command creates a new *exec.Cmd with the given name and arguments. + Command(name string, arg ...string) *exec.Cmd + // Run executes the given command and waits for it to complete. + Run(cmd *exec.Cmd) error +} + +// executor is the default implementation of the Executor interface. +type executor struct{} + +// NewExecutor creates a new Executor instance. +func NewExecutor() Executor { + return &executor{} +} + +// Command creates a new *exec.Cmd with the given name and arguments. +func (e *executor) Command(name string, arg ...string) *exec.Cmd { + return exec.Command(name, arg...) +} + +// Run executes the given command and waits for it to complete. +func (e *executor) Run(cmd *exec.Cmd) error { + return cmd.Run() +} + +// Ticker provides an interface for time-based operations with context support. +// This interface allows for easy mocking in tests and provides a clean +// abstraction over the time package's ticker functionality. +// +//go:generate mockery --name=Ticker --filename=ticker.go +type Ticker interface { + // Init creates a new time.Ticker internally with the specified duration. + // The ticker will respect the provided context for cancellation. + Init(context.Context, time.Duration) + // Tick returns a channel that receives the current time on each tick. + // If the ticker wasn't initialized, the channel will be nil. + Tick() chan time.Time + // Stop stops the ticker. If the ticker wasn't initialized, this is a no-op. 
+ Stop() +} + +// ticker is the default implementation of the Ticker interface. +type ticker struct { + ticker *time.Ticker + tick chan time.Time +} + +// Init creates a new time.Ticker internally with the specified duration. +// It starts a goroutine that forwards ticker events to the tick channel +// and handles context cancellation. +func (t *ticker) Init(ctx context.Context, duration time.Duration) { + t.ticker = time.NewTicker(duration) + t.tick = make(chan time.Time) + + go func() { + defer close(t.tick) + + for { + select { + case <-ctx.Done(): + return + case ticked, ok := <-t.ticker.C: + if !ok { + return + } + + t.tick <- ticked + } + } + }() +} + +// Tick returns a channel that receives the current time on each tick. +func (t *ticker) Tick() chan time.Time { + return t.tick +} + +// Stop stops the ticker. If the ticker wasn't initialized, this is a no-op. +func (t *ticker) Stop() { + if t.ticker == nil { + return + } + + t.ticker.Stop() +} + +// DNSProvider represents a DNS provider that can be used for DNS-01 challenges +// when generating SSL certificates. +type DNSProvider string + +// DigitalOceanDNSProvider represents the Digital Ocean DNS provider. +const DigitalOceanDNSProvider = "digitalocean" + +// CloudflareDNSProvider represents the Cloudflare DNS provider. +const CloudflareDNSProvider = "cloudflare" + +// AcmeDNSProvider represents the acme-dns provider for DNS-01 challenges. +const AcmeDNSProvider = "acmedns" + +// Config holds the configuration for CertBot operations. +type Config struct { + // RootDir is the root directory where CertBot stores its configurations + // and generated certificates. Typically "/etc/letsencrypt". + RootDir string + // Staging defines whether CertBot should use Let's Encrypt's staging server + // instead of the production server. Useful for testing to avoid rate limits. + Staging bool + // RenewedCallback is an optional callback function that gets called + // after a certificate is successfully renewed. 
+ RenewedCallback func() +} + +// Certificate represents an SSL certificate that can be generated using CertBot. +type Certificate interface { + // String returns a string representation of the certificate, typically the domain name. + String() string + // Check checks if the environment is ready for certificate generation or renewal. + Check() error + // Generate creates the SSL certificate using CertBot. + // The staging parameter determines whether to use Let's Encrypt's staging server. + Generate(staging bool) error +} + +// DefaultCertificate represents a standard SSL certificate that uses HTTP-01 challenge +// for domain validation. This is suitable for single domains where you have control +// over the web server. +type DefaultCertificate struct { + // RootDir is the root directory for certificate storage. + RootDir string + // Domain is the domain name for which the certificate will be generated. + Domain string + + ex Executor + fs afero.Fs +} + +// NewDefaultCertificate creates a new DefaultCertificate instance for the given domain. +func NewDefaultCertificate(rootdir string, domain string) Certificate { + return &DefaultCertificate{ + RootDir: rootdir, + Domain: domain, + + ex: NewExecutor(), + fs: afero.NewOsFs(), + } +} + +// startACMEServer starts a local HTTP server on port 80 to handle ACME HTTP-01 challenges. +// This server serves files from the .well-known/acme-challenge directory which is +// required for Let's Encrypt domain validation. 
+func (d *DefaultCertificate) startACMEServer() *http.Server { + mux := http.NewServeMux() + mux.Handle( + "/.well-known/acme-challenge/", + http.StripPrefix( + "/.well-known/acme-challenge/", + http.FileServer( + http.Dir(filepath.Join(d.RootDir, ".well-known/acme-challenge")), + ), + ), + ) + + server := &http.Server{ //nolint:gosec + Handler: mux, + } + + listener, err := net.Listen("tcp", ":80") //nolint:gosec + if err != nil { + log.WithError(err).Fatal("failed to start ACME server listener") + } + + go func() { + if err := server.Serve(listener); err != nil && !errors.Is(err, http.ErrServerClosed) { + log.WithError(err).Fatal("acme server error") + } + }() + + return server +} + +// stopACMEServer gracefully stops the local ACME HTTP server. +func (d *DefaultCertificate) stopACMEServer(server *http.Server) { + if err := server.Close(); err != nil { + log.WithError(err).Fatal("could not stop ACME server") + } +} + +func (d *DefaultCertificate) Check() error { + if d.Domain == "" { + return errors.New("domain is required for certificate generation") + } + + if d.RootDir == "" { + return errors.New("root directory is required for certificate generation") + } + + if _, err := d.fs.Stat(d.RootDir); os.IsNotExist(err) { + if err := d.fs.MkdirAll(d.RootDir, 0o755); err != nil { + log.WithError(err).Error("failed to create root directory for certificate generation") + + return err + } + } + + return nil +} + +// Generate creates an SSL certificate for the domain using HTTP-01 challenge. +// It starts a local HTTP server to handle the ACME challenge, runs CertBot, +// and then stops the server. 
+func (d *DefaultCertificate) Generate(staging bool) error { + log.Info("generating SSL certificate") + + // Create the ACME challenge directory + challengeDir := fmt.Sprintf("%s/.well-known/acme-challenge", os.TempDir()) + if err := d.fs.MkdirAll(challengeDir, 0o755); err != nil { + log.WithError(err).Error("failed to create acme challenge on filesystem") + + return err + } + + // Start the ACME server to handle HTTP-01 challenges + acmeServer := d.startACMEServer() + + args := []string{ + "certonly", + "--non-interactive", + "--agree-tos", + "--register-unsafely-without-email", + "--webroot", + "--webroot-path", d.RootDir, + "--preferred-challenges", "http", + "-n", + "-d", + d.Domain, + } + + if staging { + log.Info("running generate with staging") + + args = append(args, "--staging") + } + + // Build the CertBot command + cmd := d.ex.Command( + "certbot", + args..., + ) + + cmd.Stdout, cmd.Stderr = os.Stdout, os.Stderr + + if err := d.ex.Run(cmd); err != nil { + log.Error("Failed to generate SSL certificate") + + return err + } + + // Stop the ACME server + d.stopACMEServer(acmeServer) + + log.Info("generate run") + + return nil +} + +// String returns the domain name as the string representation of the certificate. +func (d *DefaultCertificate) String() string { + return d.Domain +} + +// WebEndpointsCertificate represents a wildcard SSL certificate that uses DNS-01 challenge +// for domain validation. This is suitable for wildcard certificates (*.example.com) +// where you have control over the DNS records. +type WebEndpointsCertificate struct { + // Domain is the base domain used to generate wildcard certificates. + Domain string + // Provider is the DNS provider used for DNS-01 challenges. + Provider DNSProvider + // Token is the API token for the DNS provider (used for Cloudflare and DigitalOcean). + Token string + // AcmeDNSURL is the URL of the acme-dns server (only for acmedns provider). 
+	AcmeDNSURL string
+	// AcmeDNSUsername is the username from acme-dns registration (only for acmedns provider).
+	AcmeDNSUsername string
+	// AcmeDNSPassword is the password from acme-dns registration (only for acmedns provider).
+	AcmeDNSPassword string
+	// AcmeDNSSubdomain is the subdomain from acme-dns registration (only for acmedns provider).
+	AcmeDNSSubdomain string
+
+	ex Executor
+	fs afero.Fs
+}
+
+// NewWebEndpointsCertificate creates a new WebEndpointsCertificate instance for generating
+// wildcard certificates using DNS-01 challenges.
+func NewWebEndpointsCertificate(domain string, provider DNSProvider, token string, acmeDNSURL, acmeDNSUsername, acmeDNSPassword, acmeDNSSubdomain string) Certificate {
+	return &WebEndpointsCertificate{
+		Domain: domain,
+
+		Provider: provider,
+		Token:    token,
+
+		AcmeDNSURL:       acmeDNSURL,
+		AcmeDNSUsername:  acmeDNSUsername,
+		AcmeDNSPassword:  acmeDNSPassword,
+		AcmeDNSSubdomain: acmeDNSSubdomain,
+
+		ex: NewExecutor(),
+		fs: afero.NewOsFs(),
+	}
+}
+
+// generateProviderCredentialsFile creates a credentials file for the DNS provider.
+// This file contains the API token needed for DNS-01 challenges.
+func (d *WebEndpointsCertificate) generateProviderCredentialsFile() (afero.File, error) { + var content string + var filename string + + switch d.Provider { + case CloudflareDNSProvider: + // Certbot Cloudflare plugin expects dns_cloudflare_api_token + content = fmt.Sprintf("dns_cloudflare_api_token = %s", d.Token) + filename = "/etc/shellhub-gateway/cloudflare.ini" + + case DigitalOceanDNSProvider: + content = fmt.Sprintf("dns_digitalocean_token = %s", d.Token) + filename = "/etc/shellhub-gateway/digitalocean.ini" + + case AcmeDNSProvider: + // certbot-dns-acmedns expects a JSON file with acme-dns credentials + content = fmt.Sprintf(`{ + "%s": { + "username": "%s", + "password": "%s", + "fulldomain": "%s", + "subdomain": "%s", + "allowfrom": [] + } +}`, d.Domain, d.AcmeDNSUsername, d.AcmeDNSPassword, + fmt.Sprintf("_acme-challenge.%s", d.Domain), d.AcmeDNSSubdomain) + filename = "/etc/shellhub-gateway/acmedns.json" + + default: + return nil, fmt.Errorf("unsupported DNS provider: %s", d.Provider) + } + + file, err := d.fs.Create(filename) + if err != nil { + log.WithError(err).WithField("filename", filename).Error("failed to create credentials file") + + return nil, err + } + + if _, err := file.Write([]byte(content)); err != nil { + log.WithError(err).Error("failed to write credentials to file") + + return nil, err + } + + return file, nil +} + +func (d *WebEndpointsCertificate) Check() error { + if d.Domain == "" { + return errors.New("domain is required for certificate generation") + } + + if d.Provider == "" { + return errors.New("DNS provider is required for certificate generation") + } + + // Validate provider-specific credentials + switch d.Provider { + case CloudflareDNSProvider, DigitalOceanDNSProvider: + if d.Token == "" { + return fmt.Errorf("DNS provider token is required for %s", d.Provider) + } + case AcmeDNSProvider: + if d.AcmeDNSUsername == "" { + return errors.New("acme-dns username is required for acmedns provider") + } + if d.AcmeDNSPassword == 
"" { + return errors.New("acme-dns password is required for acmedns provider") + } + if d.AcmeDNSSubdomain == "" { + return errors.New("acme-dns subdomain is required for acmedns provider") + } + default: + return fmt.Errorf("unsupported DNS provider: %s", d.Provider) + } + + if _, err := d.fs.Stat("/etc/shellhub-gateway"); os.IsNotExist(err) { + if err := d.fs.MkdirAll("/etc/shellhub-gateway", 0o755); err != nil { + log.WithError(err).Error("failed to create /etc/shellhub-gateway directory") + + return err + } + } + + if _, err := d.generateProviderCredentialsFile(); err != nil { + log.WithError(err).Error("failed to generate provider credentials file") + + return err + } + + return nil +} + +// Generate creates a wildcard SSL certificate for the domain using DNS-01 challenge. +// It creates a credentials file for the DNS provider, runs CertBot with DNS plugin, +// and generates a wildcard certificate. +func (d *WebEndpointsCertificate) Generate(staging bool) error { + log.Info("generating SSL certificate with DNS") + + // Create the DNS provider credentials file + file, err := d.generateProviderCredentialsFile() + if err != nil { + log.WithError(err).Error("failed to generate credentials file") + + return err + } + + // Build the CertBot command arguments for DNS-01 challenge + args := []string{ + "certonly", + "--non-interactive", + "--agree-tos", + "--register-unsafely-without-email", + "--cert-name", + fmt.Sprintf("*.%s", d.Domain), + } + + // Add provider-specific arguments + if d.Provider == AcmeDNSProvider { + // certbot-dns-acmedns uses different flags + args = append(args, + "--dns-acmedns", + "--dns-acmedns-credentials", + file.Name(), + ) + } else { + // Standard certbot DNS plugins (cloudflare, digitalocean) + args = append(args, + fmt.Sprintf("--dns-%s", d.Provider), + fmt.Sprintf("--dns-%s-credentials", d.Provider), + file.Name(), + ) + } + + // Add domain + args = append(args, "-d", fmt.Sprintf("*.%s", d.Domain)) + + if staging { + log.Info("running 
generate with staging on dns") + + args = append(args, "--staging") + } + + cmd := d.ex.Command( //nolint:gosec + "certbot", + args..., + ) + + cmd.Stdout, cmd.Stderr = os.Stdout, os.Stderr + + if err := d.ex.Run(cmd); err != nil { + log.WithError(err).Error("failed to generate SSL certificate") + + return err + } + + log.Info("generate run on dns") + + return nil +} + +// String returns the domain name as the string representation of the certificate. +func (d *WebEndpointsCertificate) String() string { + return d.Domain +} + +// CertBot is the main structure that handles SSL certificate generation and renewal. +// It manages multiple certificates and provides automatic renewal functionality. +type CertBot struct { + // Config holds the configuration for CertBot operations. + Config *Config + + // Certificates is a list of certificates to manage. + Certificates []Certificate + + ex Executor + tk Ticker + fs afero.Fs +} + +// newCertBot creates a new CertBot instance with the given configuration. +func newCertBot(config *Config) *CertBot { + return &CertBot{ + Config: config, + + ex: new(executor), + tk: new(ticker), + fs: afero.NewOsFs(), + } +} + +// ensureCertificates checks if SSL certificates exist for all managed domains. +// If a certificate doesn't exist, it generates a new one. +func (cb *CertBot) ensureCertificates() { + for _, certificate := range cb.Certificates { + certPath := fmt.Sprintf("%s/live/%s/fullchain.pem", cb.Config.RootDir, certificate) + if _, err := cb.fs.Stat(certPath); os.IsNotExist(err) { + certificate.Generate(cb.Config.Staging) + } + } +} + +// executeRenewCertificates runs the CertBot renew command to check and renew +// certificates that are close to expiration. 
+func (cb *CertBot) executeRenewCertificates() error { + args := []string{ + "renew", + } + + if cb.Config.Staging { + log.Info("running renew with staging") + + args = append(args, "--staging") + } + + for _, certificate := range cb.Certificates { + if err := certificate.Check(); err != nil { + log.WithError(err).Error("certificate check failed") + + return err + } + } + + cmd := cb.ex.Command( //nolint:gosec + "certbot", + args..., + ) + + if err := cb.ex.Run(cmd); err != nil { + return err + } + + log.Info("renew run") + + return nil +} + +// renewCertificates starts a background process that periodically checks and renews +// SSL certificates. It runs in a loop with the specified duration between checks. +// The process respects context cancellation for graceful shutdown. +func (cb *CertBot) renewCertificates(ctx context.Context, duration time.Duration) { + log.Info("starting SSL certificate renewal process") + + cb.tk.Init(ctx, duration) + defer cb.tk.Stop() + + ticker := cb.tk.Tick() + + for { + select { + case <-ctx.Done(): + log.Info("renew certificates loop was closed due context cancellation") + + return + case <-ticker: + log.Info("checking if SSL certificate needs to be renewed") + if err := cb.executeRenewCertificates(); err != nil { + log.WithError(err).Error("failed to renew SSL certificate") + + continue + } + + log.Info("ssl certificate successfully renewed") + cb.Config.RenewedCallback() + } + } +} diff --git a/gateway/certbot_test.go b/gateway/certbot_test.go new file mode 100644 index 00000000000..ff1d5d6e84b --- /dev/null +++ b/gateway/certbot_test.go @@ -0,0 +1,489 @@ +package main + +import ( + "context" + "errors" + "os/exec" + "testing" + "time" + + gatewayMocks "github.com/shellhub-io/shellhub/gateway/mocks" + "github.com/spf13/afero" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +// TestTunnelsCertificate_generateProviderCredentialsFile ensures the provider +// credentials file is generated correctly for 
various DNS providers. +func TestTunnelsCertificate_generateProviderCredentialsFile(t *testing.T) { + cases := []struct { + name string + provider DNSProvider + token string + wantFile string + wantContent string + }{ + { + name: "DigitalOcean", + provider: DigitalOceanDNSProvider, + token: "test-do", + wantFile: "/etc/shellhub-gateway/digitalocean.ini", + wantContent: "dns_digitalocean_token = test-do", + }, + { + name: "Cloudflare", + provider: CloudflareDNSProvider, + token: "test-cf", + wantFile: "/etc/shellhub-gateway/cloudflare.ini", + wantContent: "dns_cloudflare_api_token = test-cf", + }, + { + name: "AcmeDNS", + provider: AcmeDNSProvider, + token: "", // Not used for acme-dns + wantFile: "/etc/shellhub-gateway/acmedns.json", + wantContent: `{ + "localhost": { + "username": "test-username", + "password": "test-password", + "fulldomain": "_acme-challenge.localhost", + "subdomain": "test-subdomain", + "allowfrom": [] + } +}`, + }, + } + for _, tc := range cases { + t.Run(tc.name, func(t *testing.T) { + cert := WebEndpointsCertificate{ + Domain: "localhost", + Provider: tc.provider, + Token: tc.token, + } + + // Add acme-dns specific fields if needed + if tc.provider == AcmeDNSProvider { + cert.AcmeDNSUsername = "test-username" + cert.AcmeDNSPassword = "test-password" + cert.AcmeDNSSubdomain = "test-subdomain" + } + + cert.fs = afero.NewMemMapFs() + + file, err := cert.generateProviderCredentialsFile() + assert.NoError(t, err) + assert.Equal(t, tc.wantFile, file.Name()) + + data, err := afero.ReadFile(cert.fs, tc.wantFile) + assert.NoError(t, err) + assert.Equal(t, tc.wantContent, string(data)) + }) + } +} + +func TestTunnelsCertificate_generate(t *testing.T) { + tests := []struct { + name string + config WebEndpointsCertificate + staging bool + expected error + expectCalls func(*gatewayMocks.Executor) + }{ + { + name: "failed to run the command", + config: WebEndpointsCertificate{ + Domain: "localhost", + Provider: "digitalocean", + Token: "test", + }, + 
expectCalls: func(executorMock *gatewayMocks.Executor) { + executorMock.On("Command", "certbot", + "certonly", + "--non-interactive", + "--agree-tos", + "--register-unsafely-without-email", + "--cert-name", + "*.localhost", + "--dns-digitalocean", + "--dns-digitalocean-credentials", + "/etc/shellhub-gateway/digitalocean.ini", + "-d", + "*.localhost", + ).Return(exec.Command("")).Once() + + executorMock.On("Run", mock.AnythingOfType("*exec.Cmd")).Return(errors.New("failed to run the command")).Once() + }, + expected: errors.New("failed to run the command"), + }, + { + name: "successful certificate generation", + config: WebEndpointsCertificate{ + Domain: "localhost", + Provider: "digitalocean", + Token: "test", + }, + expectCalls: func(executorMock *gatewayMocks.Executor) { + executorMock.On("Command", "certbot", + "certonly", + "--non-interactive", + "--agree-tos", + "--register-unsafely-without-email", + "--cert-name", + "*.localhost", + "--dns-digitalocean", + "--dns-digitalocean-credentials", + "/etc/shellhub-gateway/digitalocean.ini", + "-d", + "*.localhost", + ).Return(exec.Command("")).Once() + + executorMock.On("Run", mock.AnythingOfType("*exec.Cmd")).Return(nil).Once() + }, + expected: nil, + }, + { + name: "successful certificate generation in staging", + config: WebEndpointsCertificate{ + Domain: "localhost", + Provider: "digitalocean", + Token: "test", + }, + staging: true, + expectCalls: func(executorMock *gatewayMocks.Executor) { + executorMock.On("Command", "certbot", + "certonly", + "--non-interactive", + "--agree-tos", + "--register-unsafely-without-email", + "--cert-name", + "*.localhost", + "--dns-digitalocean", + "--dns-digitalocean-credentials", + "/etc/shellhub-gateway/digitalocean.ini", + "-d", + "*.localhost", + "--staging", + ).Return(exec.Command("")).Once() + + executorMock.On("Run", mock.AnythingOfType("*exec.Cmd")).Return(nil).Once() + }, + expected: nil, + }, + { + // Cloudflare provider invocation + name: "cloudflare provider", + 
config: WebEndpointsCertificate{ + Domain: "localhost", + Provider: "cloudflare", + Token: "test", + }, + expectCalls: func(executorMock *gatewayMocks.Executor) { + executorMock.On("Command", "certbot", + "certonly", + "--non-interactive", + "--agree-tos", + "--register-unsafely-without-email", + "--cert-name", + "*.localhost", + "--dns-cloudflare", + "--dns-cloudflare-credentials", + "/etc/shellhub-gateway/cloudflare.ini", + "-d", + "*.localhost", + ).Return(exec.Command("")).Once() + + executorMock.On("Run", mock.AnythingOfType("*exec.Cmd")).Return(nil).Once() + }, + expected: nil, + }, + { + // AcmeDNS provider invocation + name: "acmedns provider", + config: WebEndpointsCertificate{ + Domain: "localhost", + Provider: "acmedns", + AcmeDNSUsername: "test-user", + AcmeDNSPassword: "test-pass", + AcmeDNSSubdomain: "test-subdomain", + }, + expectCalls: func(executorMock *gatewayMocks.Executor) { + executorMock.On("Command", "certbot", + "certonly", + "--non-interactive", + "--agree-tos", + "--register-unsafely-without-email", + "--cert-name", + "*.localhost", + "--dns-acmedns", + "--dns-acmedns-credentials", + "/etc/shellhub-gateway/acmedns.json", + "-d", + "*.localhost", + ).Return(exec.Command("")).Once() + + executorMock.On("Run", mock.AnythingOfType("*exec.Cmd")).Return(nil).Once() + }, + expected: nil, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(tt *testing.T) { + executorMock := new(gatewayMocks.Executor) + + certificate := tc.config + certificate.fs = afero.NewMemMapFs() + certificate.ex = executorMock + + tc.expectCalls(executorMock) + + err := certificate.Generate(tc.staging) + assert.Equal(tt, tc.expected, err) + + executorMock.AssertExpectations(t) + }) + } +} + +func TestCertBot_executeRenewCertificates(t *testing.T) { + tests := []struct { + name string + config Config + expected error + expectCalls func(*gatewayMocks.Executor) + }{ + { + name: "failed to run the renew command", + config: Config{ + Staging: false, + }, + expectCalls: 
func(executorMock *gatewayMocks.Executor) { + executorMock.On("Command", "certbot", "renew").Return(exec.Command("")).Once() + executorMock.On("Run", mock.AnythingOfType("*exec.Cmd")).Return(errors.New("failed to run the renew command")).Once() + }, + expected: errors.New("failed to run the renew command"), + }, + { + name: "successful renew command execution", + config: Config{ + Staging: false, + }, + expectCalls: func(executorMock *gatewayMocks.Executor) { + executorMock.On("Command", "certbot", "renew").Return(exec.Command("")).Once() + executorMock.On("Run", mock.AnythingOfType("*exec.Cmd")).Return(nil).Once() + }, + expected: nil, + }, + { + name: "successful renew command execution in staging", + config: Config{ + Staging: true, + }, + expectCalls: func(executorMock *gatewayMocks.Executor) { + executorMock.On("Command", "certbot", "renew", "--staging").Return(exec.Command("")).Once() + executorMock.On("Run", mock.AnythingOfType("*exec.Cmd")).Return(nil).Once() + }, + expected: nil, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(tt *testing.T) { + executorMock := new(gatewayMocks.Executor) + + certbot := newCertBot(&tc.config) + certbot.ex = executorMock + + tc.expectCalls(executorMock) + + err := certbot.executeRenewCertificates() + assert.Equal(tt, tc.expected, err) + + executorMock.AssertExpectations(t) + }) + } +} + +func TestCertBot_renewCertificates(t *testing.T) { + duration := 100 * time.Millisecond + + tests := []struct { + name string + config Config + expectCalls func(*gatewayMocks.Executor, *gatewayMocks.Ticker) + shouldRenewCalled bool + }{ + { + name: "failed renewal", + config: Config{ + Staging: false, + }, + expectCalls: func(executorMock *gatewayMocks.Executor, tickerMock *gatewayMocks.Ticker) { + tickerMock.On("Init", mock.Anything, mock.Anything).Once() + tickerMock.On("Stop").Once() + + ch := make(chan time.Time, 1) + ch <- time.Now() + tickerMock.On("Tick").Return(ch).Once() + + executorMock.On("Command", "certbot", + 
"renew", + ).Return(exec.Command("")).Once() + + executorMock.On("Run", mock.AnythingOfType("*exec.Cmd")).Return(errors.New("failed to renew")).Once() + }, + shouldRenewCalled: false, + }, + { + name: "failed renewal more than run time", + config: Config{ + Staging: false, + }, + expectCalls: func(executorMock *gatewayMocks.Executor, tickerMock *gatewayMocks.Ticker) { + tickerMock.On("Init", mock.Anything, mock.Anything).Once() + tickerMock.On("Stop").Once() + + ch := make(chan time.Time, 2) + ch <- time.Now() + ch <- time.Now() + tickerMock.On("Tick").Return(ch).Once() + + executorMock.On("Command", "certbot", + "renew", + ).Return(exec.Command("")).Twice() + + executorMock.On("Run", mock.AnythingOfType("*exec.Cmd")).Return(errors.New("failed to renew")).Twice() + }, + shouldRenewCalled: false, + }, + { + name: "success to renew after failure", + config: Config{ + Staging: false, + }, + expectCalls: func(executorMock *gatewayMocks.Executor, tickerMock *gatewayMocks.Ticker) { + tickerMock.On("Init", mock.Anything, mock.Anything).Once() + tickerMock.On("Stop").Once() + + ch := make(chan time.Time, 2) + ch <- time.Now() + tickerMock.On("Tick").Return(ch).Once() + + executorMock.On("Command", "certbot", + "renew", + ).Return(exec.Command("")).Once() + + executorMock.On("Run", mock.AnythingOfType("*exec.Cmd")).Return(errors.New("failed to renew")).Once() + + ch <- time.Now() + executorMock.On("Command", "certbot", + "renew", + ).Return(exec.Command("")).Once() + + executorMock.On("Run", mock.AnythingOfType("*exec.Cmd")).Return(nil).Once() + }, + shouldRenewCalled: true, + }, + { + name: "success to renew", + config: Config{ + Staging: false, + }, + expectCalls: func(executorMock *gatewayMocks.Executor, tickerMock *gatewayMocks.Ticker) { + tickerMock.On("Init", mock.Anything, mock.Anything).Once() + tickerMock.On("Stop").Once() + + ch := make(chan time.Time, 1) + ch <- time.Now() + tickerMock.On("Tick").Return(ch).Once() + + executorMock.On("Command", "certbot", + 
"renew", + ).Return(exec.Command("")).Once() + + executorMock.On("Run", mock.AnythingOfType("*exec.Cmd")).Return(nil).Once() + }, + shouldRenewCalled: true, + }, + { + name: "success to renew more than one time", + config: Config{ + Staging: false, + }, + expectCalls: func(executorMock *gatewayMocks.Executor, tickerMock *gatewayMocks.Ticker) { + tickerMock.On("Init", mock.Anything, mock.Anything).Once() + tickerMock.On("Stop").Once() + + ch := make(chan time.Time, 2) + ch <- time.Now() + ch <- time.Now() + tickerMock.On("Tick").Return(ch).Once() + + executorMock.On("Command", "certbot", + "renew", + ).Return(exec.Command("")).Twice() + + executorMock.On("Run", mock.AnythingOfType("*exec.Cmd")).Return(nil).Twice() + }, + shouldRenewCalled: true, + }, + { + name: "success to renew on staging", + config: Config{ + Staging: true, + }, + expectCalls: func(executorMock *gatewayMocks.Executor, tickerMock *gatewayMocks.Ticker) { + tickerMock.On("Init", mock.Anything, mock.Anything).Once() + tickerMock.On("Stop").Once() + + ch := make(chan time.Time, 1) + ch <- time.Now() + tickerMock.On("Tick").Return(ch).Once() + + executorMock.On("Command", "certbot", + "renew", + "--staging", + ).Return(exec.Command("")).Once() + + executorMock.On("Run", mock.AnythingOfType("*exec.Cmd")).Return(nil).Once() + }, + shouldRenewCalled: true, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(tt *testing.T) { + ctx, cancel := context.WithTimeout(context.Background(), duration) + defer cancel() + + tickerMock := new(gatewayMocks.Ticker) + executorMock := new(gatewayMocks.Executor) + + config := &tc.config + + renewWasCalled := false + config.RenewedCallback = func() { + renewWasCalled = true + } + + certbot := newCertBot(config) + certbot.tk = tickerMock + certbot.ex = executorMock + + tc.expectCalls(executorMock, tickerMock) + + done := make(chan struct{}) + go func() { + certbot.renewCertificates(ctx, duration) + close(done) + }() + + <-done + + assert.Equal(tt, 
tc.shouldRenewCalled, renewWasCalled) + + tickerMock.AssertExpectations(tt) + executorMock.AssertExpectations(tt) + }) + } +} diff --git a/gateway/conf.d/shellhub.conf b/gateway/conf.d/shellhub.conf deleted file mode 100644 index c11419d80d0..00000000000 --- a/gateway/conf.d/shellhub.conf +++ /dev/null @@ -1,451 +0,0 @@ -server { - {{ if and (bool (env.Getenv "SHELLHUB_AUTO_SSL")) (ne (env.Getenv "SHELLHUB_ENV") "development") -}} - listen 443 reuseport ssl{{ if bool (env.Getenv "SHELLHUB_PROXY") }} proxy_protocol{{ end }}; - ssl_certificate /etc/letsencrypt/live/{{ env.Getenv "SHELLHUB_DOMAIN" }}/fullchain.pem; - ssl_certificate_key /etc/letsencrypt/live/{{ env.Getenv "SHELLHUB_DOMAIN" }}/privkey.pem; - - ssl_session_cache shared:le_nginx_SSL:10m; - ssl_session_timeout 10m; - ssl_session_tickets off; - - ssl_dhparam /etc/letsencrypt/live/{{ env.Getenv "SHELLHUB_DOMAIN" }}/dhparam.pem; - - ssl_protocols TLSv1.2 TLSv1.3; - ssl_prefer_server_ciphers off; - - ssl_ciphers "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384"; - {{ else -}} - listen 80 reuseport{{ if bool (env.Getenv "SHELLHUB_PROXY") }} proxy_protocol{{ end }}; - {{- end }} - {{ if bool (env.Getenv "SHELLHUB_PROXY") }} - set_real_ip_from ::/0; - real_ip_header proxy_protocol; - {{ end }} - server_name {{ (env.Getenv "SHELLHUB_DOMAIN") }}; - resolver 127.0.0.11 ipv6=off; - - # Load configuration files for the default server block - include /etc/nginx/default.d/*.conf; - - location / { - set $upstream ui:8080; - - add_header Cache-Control "no-cache, no-store"; - add_header Pragma "no-cache"; - - proxy_pass http://$upstream; - proxy_set_header Upgrade $http_upgrade; - proxy_set_header Connection 'upgrade'; - proxy_set_header Host $host; - proxy_http_version 1.1; - proxy_cache_bypass $http_upgrade; - proxy_redirect off; - } - - 
location ^~ /.well-known/acme-challenge/ { - default_type "text/plain"; - - root /etc/letsencrypt; - } - - location /api { - set $upstream api:8080; - - auth_request /auth; - auth_request_set $tenant_id $upstream_http_x_tenant_id; - auth_request_set $username $upstream_http_x_username; - auth_request_set $id $upstream_http_x_id; - auth_request_set $mfa $upstream_http_x_mfa; - auth_request_set $validate $upstream_http_x_validate_mfa; - auth_request_set $role $upstream_http_x_role; - error_page 500 =401 /auth; - rewrite ^/api/(.*)$ /api/$1 break; - proxy_set_header X-ID $id; - proxy_set_header X-Tenant-ID $tenant_id; - proxy_set_header X-Username $username; - proxy_set_header X-Request-ID $request_id; - proxy_set_header X-MFA $mfa; - proxy_set_header X-Validate-MFA $validate; - proxy_set_header X-Role $role; - proxy_pass http://$upstream; - } - - location ~ ^/(install.sh|kickstart.sh)$ { - set $upstream api:8080; - rewrite ^/(.*)$ /api/install break; - proxy_set_header X-Forwarded-Host $host; - proxy_set_header X-Forwarded-Proto $x_forwarded_proto; - proxy_set_header X-Forwarded-Port $x_forwarded_port; - proxy_pass http://$upstream; - } - - location /api/auth/user { - set $upstream api:8080; - - auth_request /auth/skip; - auth_request_set $tenant_id $upstream_http_x_tenant_id; - auth_request_set $username $upstream_http_x_username; - auth_request_set $id $upstream_http_x_id; - auth_request_set $role $upstream_http_x_role; - error_page 500 =401 /auth; - rewrite ^/api/(.*)$ /api/$1 break; - proxy_set_header X-ID $id; - proxy_set_header X-Tenant-ID $tenant_id; - proxy_set_header X-Username $username; - proxy_set_header X-Request-ID $request_id; - proxy_set_header X-Role $role; - proxy_pass http://$upstream; - } - - {{ if eq (env.Getenv "SHELLHUB_ENV") "development" -}} - location /openapi/preview { - set $upstream openapi:8080; - rewrite ^/openapi/preview/?(.*)$ /$1 break; - proxy_pass http://$upstream; - } - - location /openapi/mock { - set $upstream openapi:4010; - 
rewrite ^/openapi/mock/?(.*)$ /$1 break; - proxy_pass http://$upstream; - } - - location /openapi/proxy { - set $upstream openapi:4020; - rewrite ^/openapi/proxy/?(.*)$ /$1 break; - proxy_pass http://$upstream; - } - {{- end }} - - {{ if bool (env.Getenv "SHELLHUB_ENTERPRISE") -}} - location /admin/dashboard/ { - set $upstream dashboard:8080; - add_header Cache-Control "no-cache, no-store"; - add_header Pragma "no-cache"; - - {{ if ne (env.Getenv "SHELLHUB_ENV") "development" -}} - rewrite ^/admin/dashboard/(.*)$ /$1 break; - {{- end }} - - proxy_pass http://$upstream; - proxy_set_header Upgrade $http_upgrade; - proxy_set_header Connection 'upgrade'; - proxy_set_header Host $host; - proxy_http_version 1.1; - proxy_cache_bypass $http_upgrade; - proxy_redirect off; - } - - location ~ ^/admin/?$ { - rewrite ^/admin/?$ /admin/dashboard permanent; - } - - location /admin { - set $upstream admin-api:8080; - error_page 500 =401; - rewrite ^/admin/(.*)$ /$1 break; - proxy_pass http://$upstream; - } - {{ end -}} - - location /ssh/connection { - set $upstream ssh:8080; - auth_request /auth; - auth_request_set $device_uid $upstream_http_x_device_uid; - proxy_pass http://$upstream; - proxy_set_header Upgrade $http_upgrade; - proxy_set_header Connection 'upgrade'; - proxy_set_header Host $host; - {{ if bool (env.Getenv "SHELLHUB_PROXY") -}} - proxy_set_header X-Real-IP $proxy_protocol_addr; - {{ else -}} - proxy_set_header X-Real-IP $x_real_ip; - {{ end -}} - proxy_set_header X-Device-UID $device_uid; - proxy_http_version 1.1; - proxy_cache_bypass $http_upgrade; - proxy_redirect off; - } - - location /ssh/revdial { - set $upstream ssh:8080; - proxy_pass http://$upstream; - proxy_set_header Upgrade $http_upgrade; - proxy_set_header Connection 'upgrade'; - proxy_set_header Host $host; - {{ if bool (env.Getenv "SHELLHUB_PROXY") -}} - proxy_set_header X-Real-IP $proxy_protocol_addr; - {{ else -}} - proxy_set_header X-Real-IP $x_real_ip; - {{ end -}} - proxy_http_version 1.1; - 
proxy_cache_bypass $http_upgrade; - proxy_redirect off; - } - - location /ssh/auth { - set $upstream api:8080; - auth_request /auth; - auth_request_set $device_uid $upstream_http_x_device_uid; - error_page 500 =401 /auth; - proxy_pass http://$upstream; - proxy_set_header X-Device-UID $device_uid; - } - - {{ if bool (env.Getenv "SHELLHUB_CLOUD") -}} - location /api/announcements { - set $upstream cloud-api:8080; - rewrite ^/api/(.*)$ /api/$1 break; - proxy_pass http://$upstream; - } - {{ end -}} - - {{ if bool (env.Getenv "SHELLHUB_CLOUD") -}} - location /api/billing { - set $upstream billing-api:8080; - auth_request /auth; - auth_request_set $tenant_id $upstream_http_x_tenant_id; - auth_request_set $username $upstream_http_x_username; - auth_request_set $id $upstream_http_x_id; - auth_request_set $role $upstream_http_x_role; - error_page 500 =401 /auth; - rewrite ^/api/(.*)$ /api/$1 break; - proxy_set_header X-Tenant-ID $tenant_id; - proxy_set_header X-Username $username; - proxy_set_header X-ID $id; - proxy_set_header X-Role $role; - proxy_pass http://$upstream; - } - {{ end -}} - - {{ if bool (env.Getenv "SHELLHUB_ENTERPRISE") -}} - location /api/firewall { - set $upstream cloud-api:8080; - auth_request /auth; - auth_request_set $tenant_id $upstream_http_x_tenant_id; - auth_request_set $username $upstream_http_x_username; - auth_request_set $id $upstream_http_x_id; - auth_request_set $role $upstream_http_x_role; - error_page 500 =401 /auth; - rewrite ^/api/(.*)$ /api/$1 break; - proxy_set_header X-Tenant-ID $tenant_id; - proxy_set_header X-Username $username; - proxy_set_header X-ID $id; - proxy_set_header X-Role $role; - proxy_pass http://$upstream; - } - {{ end -}} - - {{ if bool (env.Getenv "SHELLHUB_ENTERPRISE") -}} - location /api/mfa/recovery { - set $upstream cloud-api:8080; - auth_request /auth/skip; - auth_request_set $id $upstream_http_x_id; - error_page 500 =401 /auth; - rewrite ^/api/(.*)$ /api/$1 break; - proxy_set_header X-ID $id; - proxy_pass 
http://$upstream; - } - - location /api/mfa/auth { - set $upstream cloud-api:8080; - auth_request /auth/skip; - auth_request_set $id $upstream_http_x_id; - error_page 500 =401 /auth; - rewrite ^/api/(.*)$ /api/$1 break; - proxy_set_header X-ID $id; - proxy_pass http://$upstream; - } - - location /api/mfa { - set $upstream cloud-api:8080; - auth_request /auth; - auth_request_set $id $upstream_http_x_id; - auth_request_set $username $upstream_http_x_username; - error_page 500 =401 /auth; - rewrite ^/api/(.*)$ /api/$1 break; - proxy_set_header X-ID $id; - proxy_set_header X-Username $username; - proxy_pass http://$upstream; - } - {{ end -}} - - {{ if bool (env.Getenv "SHELLHUB_ENTERPRISE") -}} - location /api/register { - set $upstream cloud-api:8080; - proxy_set_header X-Forwarded-Host $host; - proxy_pass http://$upstream; - } - {{ end -}} - - {{ if bool (env.Getenv "SHELLHUB_CLOUD") -}} - location /api/user/recover_password { - set $upstream cloud-api:8080; - proxy_set_header X-Forwarded-Host $host; - proxy_pass http://$upstream; - } - {{ end -}} - - {{ if bool (env.Getenv "SHELLHUB_CLOUD") -}} - location ~* /api/user/(.*)/update_password { - set $upstream cloud-api:8080; - proxy_pass http://$upstream; - } - {{ end -}} - - {{ if bool (env.Getenv "SHELLHUB_CLOUD") -}} - location /api/user/resend_email { - set $upstream cloud-api:8080; - proxy_set_header X-Forwarded-Host $host; - proxy_pass http://$upstream; - } - {{ end -}} - - {{ if bool (env.Getenv "SHELLHUB_CLOUD") -}} - location /api/user/validation_account { - set $upstream cloud-api:8080; - proxy_pass http://$upstream; - } - {{ end -}} - - {{ if bool (env.Getenv "SHELLHUB_ENTERPRISE") -}} - location ~* /api/sessions/(.*)/record { - set $upstream cloud-api:8080; - auth_request /auth; - auth_request_set $tenant_id $upstream_http_x_tenant_id; - auth_request_set $username $upstream_http_x_username; - auth_request_set $role $upstream_http_x_role; - error_page 500 =401 /auth; - rewrite ^/api/(.*)$ /api/$1 break; - 
proxy_set_header X-Tenant-ID $tenant_id; - proxy_set_header X-Username $username; - proxy_set_header X-Role $role; - proxy_pass http://$upstream; - } - {{ end -}} - - {{ if bool (env.Getenv "SHELLHUB_ENTERPRISE") -}} - location ~* /api/sessions/(.*)/play { - set $upstream cloud-api:8080; - auth_request /auth; - auth_request_set $tenant_id $upstream_http_x_tenant_id; - auth_request_set $username $upstream_http_x_username; - auth_request_set $role $upstream_http_x_role; - error_page 500 =401 /auth; - rewrite ^/api/(.*)$ /api/$1 break; - proxy_set_header X-Tenant-ID $tenant_id; - proxy_set_header X-Username $username; - proxy_set_header X-Role $role; - proxy_pass http://$upstream; - } - {{ end -}} - - location ~* /api/sessions/(.*)/close { - set $upstream ssh:8080; - auth_request /auth; - auth_request_set $tenant_id $upstream_http_x_tenant_id; - auth_request_set $role $upstream_http_x_role; - error_page 500 =401 /auth; - rewrite ^/api/(.*)$ /$1 break; - proxy_set_header X-Tenant-ID $tenant_id; - proxy_set_header X-Role $role; - proxy_pass http://$upstream; - } - - location /api/devices/auth { - set $upstream api:8080; - auth_request off; - rewrite ^/api/(.*)$ /api/$1 break; - {{ if bool (env.Getenv "SHELLHUB_PROXY") -}} - proxy_set_header X-Real-IP $proxy_protocol_addr; - {{ else -}} - proxy_set_header X-Real-IP $x_real_ip; - {{ end -}} - proxy_pass http://$upstream; - } - - location /api/login { - set $upstream api:8080; - auth_request off; - rewrite ^/api/(.*)$ /api/$1 break; - proxy_pass http://$upstream; - } - - location /api/webhook-billing { - set $upstream billing-api:8080; - auth_request off; - rewrite ^/api/(.*)$ /api/$1 break; - proxy_pass http://$upstream; - } - - location /auth { - set $upstream_auth api:8080; - internal; - rewrite ^/(.*)$ /internal/$1 break; - proxy_pass http://$upstream_auth; - } - - location /auth/skip { - set $upstream_auth api:8080; - internal; - rewrite ^/auth/(.*)$ /internal/auth?args=$1 break; - proxy_pass http://$upstream_auth; - 
} - - location /ws { - set $upstream ssh:8080; - proxy_pass http://$upstream; - proxy_set_header Upgrade $http_upgrade; - proxy_set_header Connection 'upgrade'; - proxy_set_header Host $host; - - {{ if bool (env.Getenv "SHELLHUB_PROXY") -}} - proxy_set_header X-Real-IP $proxy_protocol_addr; - {{ else -}} - proxy_set_header X-Real-IP $x_real_ip; - {{ end -}} - proxy_http_version 1.1; - proxy_cache_bypass $http_upgrade; - proxy_redirect off; - } - - location /info { - set $upstream api:8080; - - proxy_set_header X-Forwarded-Host $host; - proxy_set_header X-Forwarded-Port $x_forwarded_port; - proxy_pass http://$upstream; - rewrite ^/(.*)$ /api/info break; - } - - location = /nginx_status { - stub_status; - allow 127.0.0.1; - allow {{ env.Getenv "HOST_IP" }}; - deny all; - } -} - -{{- $PUBLIC_URL_DOMAIN := or (env.Getenv "SHELLHUB_PUBLIC_URL_DOMAIN") (env.Getenv "SHELLHUB_DOMAIN") }} -server { - listen 80; - server_name ~^(?.+)\.(?.+)\.{{ $PUBLIC_URL_DOMAIN }}$; - resolver 127.0.0.11 ipv6=off; - - location / { - set $upstream ssh:8080; - - rewrite ^/(.*)$ /ssh/http break; - proxy_set_header X-Public-URL-Address $device; - proxy_set_header X-Path /$1$is_args$args; - proxy_pass http://$upstream; - } -} - -{{ if and (bool (env.Getenv "SHELLHUB_AUTO_SSL")) (ne (env.Getenv "SHELLHUB_ENV") "development") -}} -server { - listen 80 default_server; - - return 308 https://$host$request_uri; -} -{{- end }} diff --git a/gateway/config.go b/gateway/config.go new file mode 100644 index 00000000000..e94ed570806 --- /dev/null +++ b/gateway/config.go @@ -0,0 +1,75 @@ +package main + +import ( + "context" + "fmt" + "runtime" + + "github.com/go-playground/validator/v10" + "github.com/sethvargo/go-envconfig" +) + +// GatewayConfig holds the configuration settings for the gateway. 
+type GatewayConfig struct { + Env string `env:"SHELLHUB_ENV"` + Domain string `env:"SHELLHUB_DOMAIN,required" validate:"hostname"` + WebEndpoints bool `env:"SHELLHUB_WEB_ENDPOINTS,default=false"` + WebEndpointsDomain string `env:"SHELLHUB_WEB_ENDPOINTS_DOMAIN"` + WebEndpointsDNSProvider DNSProvider `env:"SHELLHUB_WEB_ENDPOINTS_DNS_PROVIDER,default=digitalocean"` + WebEndpointsDNSProviderToken string `env:"SHELLHUB_WEB_ENDPOINTS_DNS_PROVIDER_TOKEN"` + WebEndpointsAcmeDNSURL string `env:"SHELLHUB_WEB_ENDPOINTS_ACME_DNS_URL"` + WebEndpointsAcmeDNSUsername string `env:"SHELLHUB_WEB_ENDPOINTS_ACME_DNS_USERNAME"` + WebEndpointsAcmeDNSPassword string `env:"SHELLHUB_WEB_ENDPOINTS_ACME_DNS_PASSWORD"` + WebEndpointsAcmeDNSSubdomain string `env:"SHELLHUB_WEB_ENDPOINTS_ACME_DNS_SUBDOMAIN"` + WorkerProcesses string `env:"WORKER_PROCESSES,default=auto"` + MaxWorkerOpenFiles int `env:"MAX_WORKER_OPEN_FILES,default=0"` + MaxWorkerConnections int `env:"MAX_WORKER_CONNECTIONS,default=16384"` + BacklogSize int `env:"BACKLOG_SIZE"` + EnableAutoSSL bool `env:"SHELLHUB_AUTO_SSL"` + EnableProxyProtocol bool `env:"SHELLHUB_PROXY"` + EnableEnterprise bool `env:"SHELLHUB_ENTERPRISE"` + EnableCloud bool `env:"SHELLHUB_CLOUD"` + EnableAccessLogs bool `env:"SHELLHUB_GATEWAY_ACCESS_LOGS" default:"true"` + APIRateLimit string `env:"SHELLHUB_API_RATE_LIMIT,default=1000r/s"` + APIRateLimitZoneSize string `env:"SHELLHUB_API_RATE_LIMIT_ZONE_SIZE,default=10m"` + APIBurstSize string `env:"SHELLHUB_API_BURST_SIZE,default=1"` + APIBurstDelay string `env:"SHELLHUB_API_BURST_DELAY,default=nodelay"` +} + +var validate = validator.New() + +// loadGatewayConfig loads and validates the configuration from environment variables. 
+func loadGatewayConfig() (*GatewayConfig, error) { + var config GatewayConfig + if err := envconfig.Process(context.Background(), &config); err != nil { + return nil, err + } + + config.applyDefaults() + + if err := validate.Struct(config); err != nil { + return nil, err + } + + return &config, nil +} + +// applyDefaults sets default values for the GatewayConfig if not provided. +func (gc *GatewayConfig) applyDefaults() { + if gc.WorkerProcesses == "auto" { + gc.WorkerProcesses = fmt.Sprintf("%d", runtime.NumCPU()) + } + + if gc.MaxWorkerOpenFiles == 0 { + gc.MaxWorkerOpenFiles = rlimitMaxNumFiles() - 1024 + if gc.MaxWorkerOpenFiles < 1024 { + gc.MaxWorkerOpenFiles = 1024 + } + } + + if gc.MaxWorkerConnections == 0 { + gc.MaxWorkerConnections = int(float64(gc.MaxWorkerOpenFiles * 3.0 / 4)) + } + + gc.BacklogSize = getSysctl("net.core.somaxconn") +} diff --git a/gateway/entrypoint-dev.sh b/gateway/entrypoint-dev.sh new file mode 100755 index 00000000000..269beeec38f --- /dev/null +++ b/gateway/entrypoint-dev.sh @@ -0,0 +1,5 @@ +#!/bin/sh + +ln -sf $PWD/gateway /gateway + +air diff --git a/gateway/entrypoint.sh b/gateway/entrypoint.sh deleted file mode 100755 index 7261e83a15e..00000000000 --- a/gateway/entrypoint.sh +++ /dev/null @@ -1,132 +0,0 @@ -#!/bin/sh - -# Same tunning settings applied by NGINX Ingress Controller -# https://github.com/kubernetes/ingress-nginx/blob/844a02c276788e293480c080fe09f4d242545c82/internal/ingress/controller/nginx.go#L512 - -WORKER_PROCESSES="${WORKER_PROCESSES:-auto}" -MAX_WORKER_OPEN_FILES="${MAX_WORKER_OPEN_FILES:-0}" -MAX_WORKER_CONNECTIONS="${MAX_WORKER_CONNECTIONS:-16384}" - -if [ "$WORKER_PROCESSES" == "auto" ]; then - WORKER_PROCESSES=$(nproc) -fi - -if [ "$MAX_WORKER_OPEN_FILES" == "0" ]; then - MAX_WORKER_OPEN_FILES=$(($(ulimit -Sn) - 1024)) - - if [ "$MAX_WORKER_OPEN_FILES" -lt "1024" ]; then - MAX_WORKER_OPEN_FILES=1024 - fi -fi - -if [ "$MAX_WORKER_CONNECTIONS" == "0" ]; then - 
MAX_WORKER_CONNECTIONS=$(($MAX_WORKER_OPEN_FILES * 3 / 4)) -fi - -export WORKER_PROCESSES -export MAX_WORKER_OPEN_FILES -export MAX_WORKER_CONNECTIONS -export HOST_IP=$(ip -4 route show default | awk '{ print $3 }') - -wait_for_acme_webserver() { - for i in `seq 30` ; do - nc -z localhost 80 > /dev/null 2>&1 - - if [ $? -eq 0 ] ; then - return - fi - - sleep 1 - done - - echo "Timed out waiting for ACME webserver" >&2 - - exit 1 -} - -# The certificate generation is only available in production mode and if the SHELLHUB_AUTO_SSL is set to true. -if [ "$SHELLHUB_ENV" != "development" ] && [ "$SHELLHUB_AUTO_SSL" == "true" ]; then - if [ -z "$SHELLHUB_DOMAIN" ]; then - echo "SHELLHUB_DOMAIN cannot be empty" - exit 1 - fi - - if ! echo "$SHELLHUB_DOMAIN" | grep -qE '^([a-z0-9]+(-[a-z0-9]+)*\.)+[a-z]{2,}$'; then - echo "SHELLHUB_DOMAIN must be a valid domain name" - exit 1 - fi - - ACME_WEBSERVER_ROOT="/etc/letsencrypt" - - # If the certificate is not generated yet, generate it. - if [ ! -f /etc/letsencrypt/live/$SHELLHUB_DOMAIN/fullchain.pem ]; then - echo "Generating SSL certificate" - - ACME_CHALLENGE_DIR="$ACME_WEBSERVER_ROOT/.well-known/acme-challenge" - - mkdir -p $ACME_CHALLENGE_DIR - - # We need to ensure that acme challenge webserver is running before running certbot, - # as we are utilizing the webroot mode, which relies on a running local webserver - ACME_WEBSERVER_PID=$(cd $ACME_WEBSERVER_ROOT; python -m http.server 80 > /dev/null 2>&1 & echo $!) - wait_for_acme_webserver - - certbot certonly --non-interactive --agree-tos --register-unsafely-without-email --webroot --webroot-path $ACME_WEBSERVER_ROOT --preferred-challenges http -n -d $SHELLHUB_DOMAIN - if [ $? -ne 0 ]; then - echo "Failed to generate SSL certificate" - exit 1 - fi - - echo "SSL certificate successfully generated" - - curl https://ssl-config.mozilla.org/ffdhe2048.txt > /etc/letsencrypt/live/$SHELLHUB_DOMAIN/dhparam.pem 2> /dev/null - if [ $? 
-ne 0 ]; then - echo "Failed to download Mozilla's DH parameters" - exit 1 - fi - - echo "Mozilla's DH parameters successfully downloaded" - - kill $ACME_WEBSERVER_PID - fi - - # Loop every 24 hours to check if certificate is about to expire. - # It is safe to run the renew each 24 hours because the command will only renew if the certificate is about to expire. - # About to expire is defined as: "if a certificate is going to expire in less than 30 days, it will be renewed." - # https://eff-certbot.readthedocs.io/en/stable/using.html#renewing-certificates - while sleep "24h" ; do - # If certificate is already generated, check if it is about to expire. If so, renew it. - # You may want to renew near the 60 day mark, to provide enough time for any possible problems that may arise. - # https://letsencrypt.org/docs/faq/#what-is-the-lifetime-for-let-s-encrypt-certificates-for-how-long-are-they-valid - - echo "Checking if SSL certificate needs to be renewed" - certbot renew --webroot --webroot-path $ACME_WEBSERVER_ROOT - if [ $? 
-ne 0 ]; then - echo "Failed to renew SSL certificate" - exit 1 - fi - - nginx -s reload - - echo "SSL certificate successfully renewed" - done & -fi - -generate() { - gomplate -f /app/nginx.conf -o /etc/nginx/nginx.conf - gomplate -f /app/conf.d/shellhub.conf -o /etc/nginx/conf.d/shellhub.conf -} - -if [ "$SHELLHUB_ENV" == "development" ]; then - while inotifywait -q -r -e close_write "/app/nginx.conf" "/app/conf.d/" > /dev/null; do - generate - nginx -s reload - done & -fi - -generate - -mkdir -p /var/run/nginx - -echo "Starting NGINX" -exec "$@" diff --git a/gateway/go.mod b/gateway/go.mod new file mode 100644 index 00000000000..04d5ff4fcda --- /dev/null +++ b/gateway/go.mod @@ -0,0 +1,77 @@ +module github.com/shellhub-io/shellhub/gateway + +go 1.24.9 + +require ( + github.com/fsnotify/fsnotify v1.7.0 + github.com/go-playground/validator/v10 v10.22.0 + github.com/pkg/errors v0.9.1 + github.com/sethvargo/go-envconfig v0.9.0 + github.com/shellhub-io/shellhub v0.0.0-00010101000000-000000000000 + github.com/sirupsen/logrus v1.9.3 + github.com/spf13/afero v1.14.0 + github.com/stretchr/testify v1.10.0 + github.com/testcontainers/testcontainers-go v0.32.0 + golang.org/x/sys v0.38.0 +) + +require ( + dario.cat/mergo v1.0.0 // indirect + github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect + github.com/Microsoft/go-winio v0.6.2 // indirect + github.com/Microsoft/hcsshim v0.11.7 // indirect + github.com/cenkalti/backoff/v4 v4.3.0 // indirect + github.com/containerd/containerd v1.7.29 // indirect + github.com/containerd/log v0.1.0 // indirect + github.com/containerd/platforms v0.2.1 // indirect + github.com/cpuguy83/dockercfg v0.3.1 // indirect + github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect + github.com/distribution/reference v0.6.0 // indirect + github.com/docker/docker v28.0.0+incompatible // indirect + github.com/docker/go-connections v0.5.0 // indirect + github.com/docker/go-units v0.5.0 // indirect + 
github.com/felixge/httpsnoop v1.0.4 // indirect + github.com/gabriel-vasile/mimetype v1.4.3 // indirect + github.com/go-logr/logr v1.4.3 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-ole/go-ole v1.2.6 // indirect + github.com/go-playground/locales v0.14.1 // indirect + github.com/go-playground/universal-translator v0.18.1 // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/klauspost/compress v1.17.4 // indirect + github.com/leodido/go-urn v1.4.0 // indirect + github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect + github.com/magiconair/properties v1.8.7 // indirect + github.com/moby/docker-image-spec v1.3.1 // indirect + github.com/moby/patternmatcher v0.6.0 // indirect + github.com/moby/sys/sequential v0.5.0 // indirect + github.com/moby/sys/user v0.3.0 // indirect + github.com/moby/sys/userns v0.1.0 // indirect + github.com/moby/term v0.5.0 // indirect + github.com/morikuni/aec v1.0.0 // indirect + github.com/opencontainers/go-digest v1.0.0 // indirect + github.com/opencontainers/image-spec v1.1.0 // indirect + github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect + github.com/shirou/gopsutil/v3 v3.23.12 // indirect + github.com/shoenig/go-m1cpu v0.1.6 // indirect + github.com/stretchr/objx v0.5.2 // indirect + github.com/tklauser/go-sysconf v0.3.12 // indirect + github.com/tklauser/numcpus v0.6.1 // indirect + github.com/yusufpapurcu/wmi v1.2.3 // indirect + go.opentelemetry.io/auto/sdk v1.1.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.51.0 // indirect + go.opentelemetry.io/otel v1.37.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.26.0 // indirect + go.opentelemetry.io/otel/metric v1.37.0 // indirect + go.opentelemetry.io/otel/trace v1.37.0 // indirect + go.opentelemetry.io/proto/otlp v1.2.0 
// indirect + golang.org/x/crypto v0.45.0 // indirect + golang.org/x/net v0.47.0 // indirect + golang.org/x/text v0.31.0 // indirect + google.golang.org/grpc v1.63.2 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) + +replace github.com/shellhub-io/shellhub => ../ diff --git a/gateway/go.sum b/gateway/go.sum new file mode 100644 index 00000000000..d37cea383bc --- /dev/null +++ b/gateway/go.sum @@ -0,0 +1,219 @@ +dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= +dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= +github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24 h1:bvDV9vkmnHYOMsOr4WLk+Vo07yKIzd94sVoIqshQ4bU= +github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8= +github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= +github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= +github.com/Microsoft/hcsshim v0.11.7 h1:vl/nj3Bar/CvJSYo7gIQPyRWc9f3c6IeSNavBTSZNZQ= +github.com/Microsoft/hcsshim v0.11.7/go.mod h1:MV8xMfmECjl5HdO7U/3/hFVnkmSBjAjmA09d4bExKcU= +github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= +github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= +github.com/containerd/containerd v1.7.29 h1:90fWABQsaN9mJhGkoVnuzEY+o1XDPbg9BTC9QTAHnuE= +github.com/containerd/containerd v1.7.29/go.mod h1:azUkWcOvHrWvaiUjSQH0fjzuHIwSPg1WL5PshGP4Szs= +github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= +github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= +github.com/containerd/platforms v0.2.1 
h1:zvwtM3rz2YHPQsF2CHYM8+KtB5dvhISiXh5ZpSBQv6A= +github.com/containerd/platforms v0.2.1/go.mod h1:XHCb+2/hzowdiut9rkudds9bE5yJ7npe7dG/wG+uFPw= +github.com/cpuguy83/dockercfg v0.3.1 h1:/FpZ+JaygUR/lZP2NlFI2DVfrOEMAIKP5wWEJdoYe9E= +github.com/cpuguy83/dockercfg v0.3.1/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc= +github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY= +github.com/creack/pty v1.1.18/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= +github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= +github.com/docker/docker v28.0.0+incompatible h1:Olh0KS820sJ7nPsBKChVhk5pzqcwDR15fumfAd/p9hM= +github.com/docker/docker v28.0.0+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c= +github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc= +github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= +github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA= +github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM= 
+github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0= +github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= +github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= +github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= +github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= +github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= +github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= +github.com/go-playground/validator/v10 v10.22.0 h1:k6HsTZ0sTnROkhS//R0O+55JgM8C4Bx7ia+JlgcnOao= +github.com/go-playground/validator/v10 v10.22.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.6.0/go.mod 
h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.1 h1:/c3QmbOGMGTOumP2iT/rCwB7b0QDGLKzqOmktBjT+Is= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.1/go.mod h1:5SN9VR2LTsRFsrEC6FHgRbTWrTHu6tqPeKxEQv15giM= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4= +github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= +github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= +github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= +github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= +github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= +github.com/moby/docker-image-spec v1.3.1/go.mod 
h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= +github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk= +github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc= +github.com/moby/sys/sequential v0.5.0 h1:OPvI35Lzn9K04PBbCLW0g4LcFAJgHsvXsRyewg5lXtc= +github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWKGRYaaV5ZZlo= +github.com/moby/sys/user v0.3.0 h1:9ni5DlcW5an3SvRSx4MouotOygvzaXbaSrc/wGDFWPo= +github.com/moby/sys/user v0.3.0/go.mod h1:bG+tYYYJgaMtRKgEmuueC0hJEAZWwtIbZTB+85uoHjs= +github.com/moby/sys/userns v0.1.0 h1:tVLXkFOxVu9A64/yh59slHVv9ahO9UIev4JZusOLG/g= +github.com/moby/sys/userns v0.1.0/go.mod h1:IHUYgu/kao6N8YZlp9Cf444ySSvCmDlmzUcYfDHOl28= +github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= +github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= +github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= +github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= +github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug= +github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/power-devops/perfstat 
v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= +github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= +github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= +github.com/sethvargo/go-envconfig v0.9.0 h1:Q6FQ6hVEeTECULvkJZakq3dZMeBQ3JUpcKMfPQbKMDE= +github.com/sethvargo/go-envconfig v0.9.0/go.mod h1:Iz1Gy1Sf3T64TQlJSvee81qDhf7YIlt8GMUX6yyNFs0= +github.com/shirou/gopsutil/v3 v3.23.12 h1:z90NtUkp3bMtmICZKpC4+WaknU1eXtp5vtbQ11DgpE4= +github.com/shirou/gopsutil/v3 v3.23.12/go.mod h1:1FrWgea594Jp7qmjHUUPlJDTPgcsb9mGnXDxavtikzM= +github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM= +github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ= +github.com/shoenig/test v0.6.4 h1:kVTaSd7WLz5WZ2IaoM0RSzRsUD+m8wRR+5qvntpn4LU= +github.com/shoenig/test v0.6.4/go.mod h1:byHiCGXqrVaflBLAMq/srcZIHynQPQgeyvkvXnjqq0k= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/spf13/afero v1.14.0 h1:9tH6MapGnn/j0eb0yIXiLjERO8RB6xIVZRDCX7PtqWA= +github.com/spf13/afero v1.14.0/go.mod h1:acJQ8t0ohCGuMN3O+Pv0V0hgMxNYDlvdk+VTfyZmbYo= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify 
v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/testcontainers/testcontainers-go v0.32.0 h1:ug1aK08L3gCHdhknlTTwWjPHPS+/alvLJU/DRxTD/ME= +github.com/testcontainers/testcontainers-go v0.32.0/go.mod h1:CRHrzHLQhlXUsa5gXjTOfqIEJcrK5+xMDmBr/WMI88E= +github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU= +github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI= +github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk= +github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw= +github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= +go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= +go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.51.0 h1:Xs2Ncz0gNihqu9iosIZ5SkBbWo5T8JhhLJFMQL1qmLI= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.51.0/go.mod h1:vy+2G/6NvVMpwGX/NyLqcC41fxepnuKHk16E6IZUcJc= +go.opentelemetry.io/otel v1.37.0 h1:9zhNfelUvx0KBfu/gb+ZgeAfAgtWrfHJZcAqFC228wQ= +go.opentelemetry.io/otel v1.37.0/go.mod h1:ehE/umFRLnuLa/vSccNq9oS1ErUlkkK71gMcN34UG8I= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.26.0 
h1:1u/AyyOqAWzy+SkPxDpahCNZParHV8Vid1RnI2clyDE= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.26.0/go.mod h1:z46paqbJ9l7c9fIPCXTqTGwhQZ5XoTIsfeFYWboizjs= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.26.0 h1:1wp/gyxsuYtuE/JFxsQRtcCDtMrO2qMvlfXALU5wkzI= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.26.0/go.mod h1:gbTHmghkGgqxMomVQQMur1Nba4M0MQ8AYThXDUjsJ38= +go.opentelemetry.io/otel/metric v1.37.0 h1:mvwbQS5m0tbmqML4NqK+e3aDiO02vsf/WgbsdpcPoZE= +go.opentelemetry.io/otel/metric v1.37.0/go.mod h1:04wGrZurHYKOc+RKeye86GwKiTb9FKm1WHtO+4EVr2E= +go.opentelemetry.io/otel/sdk v1.26.0 h1:Y7bumHf5tAiDlRYFmGqetNcLaVUZmh4iYfmGxtmz7F8= +go.opentelemetry.io/otel/sdk v1.26.0/go.mod h1:0p8MXpqLeJ0pzcszQQN4F0S5FVjBLgypeGSngLsmirs= +go.opentelemetry.io/otel/trace v1.37.0 h1:HLdcFNbRQBE2imdSEgm/kwqmQj1Or1l/7bW6mxVK7z4= +go.opentelemetry.io/otel/trace v1.37.0/go.mod h1:TlgrlQ+PtQO5XFerSPUYG0JSgGyryXewPGyayAWSBS0= +go.opentelemetry.io/proto/otlp v1.2.0 h1:pVeZGk7nXDC9O2hncA6nHldxEjm6LByfA2aN8IOkz94= +go.opentelemetry.io/proto/otlp v1.2.0/go.mod h1:gGpR8txAl5M03pDhMC79G6SdqNV26naRm/KDsgaHD8A= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q= +golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod 
h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY= +golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc= +golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU= +golang.org/x/term 
v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM= +golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM= +golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE= +golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de h1:F6qOa9AZTYJXOUEr4jDysRDLrm4PHePlge4v4TGAlxY= +google.golang.org/genproto/googleapis/api v0.0.0-20240227224415-6ceb2ff114de h1:jFNzHPIeuzhdRwVhbZdiym9q0ory/xY3sA+v2wPg8I0= +google.golang.org/genproto/googleapis/api v0.0.0-20240227224415-6ceb2ff114de/go.mod h1:5iCWqnniDlqZHrd3neWVTOwvh/v6s3232omMecelax8= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda h1:LI5DOvAxUPMv/50agcLLoo+AdWc1irS9Rzz4vPuD1V4= +google.golang.org/genproto/googleapis/rpc 
v0.0.0-20240401170217-c3f982113cda/go.mod h1:WtryC6hu0hhx87FDGxWCDptyssuo68sk10vYjF+T9fY= +google.golang.org/grpc v1.63.2 h1:MUeiw1B2maTVZthpU5xvASfTh3LDbxHd6IJ6QQVU+xM= +google.golang.org/grpc v1.63.2/go.mod h1:WAX/8DgncnokcFUldAxq7GeB5DXHDbMF+lLvDomNkRA= +google.golang.org/protobuf v1.35.2 h1:8Ar7bF+apOIoThw1EdZl0p1oWvMqTHmpA2fRTyZO8io= +google.golang.org/protobuf v1.35.2/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gotest.tools/v3 v3.5.1 h1:EENdUnS3pdur5nybKYIh2Vfgc8IUNBjxDPSjtiJcOzU= +gotest.tools/v3 v3.5.1/go.mod h1:isy3WKz7GK6uNw/sbHzfKBLvlvXwUyV06n6brMxxopU= diff --git a/gateway/main.go b/gateway/main.go new file mode 100644 index 00000000000..7630a11c37b --- /dev/null +++ b/gateway/main.go @@ -0,0 +1,188 @@ +package main + +import ( + "context" + "slices" + "time" + + "github.com/shellhub-io/shellhub/pkg/envs" + "github.com/shellhub-io/shellhub/pkg/loglevel" + log "github.com/sirupsen/logrus" +) + +const ( + // defaultNginxRootDir is the default base directory for Nginx configuration files. + // This directory typically contains the main nginx.conf and site configuration files. + defaultNginxRootDir = "/etc/nginx" + + // defaultNginxTemplateDir is the default directory where Nginx template files are stored. + // Template files are used to generate dynamic Nginx configurations based on runtime settings. 
+	defaultNginxTemplateDir = "/templates"
+
+	// defaultCertBotRootDir is the default directory where Certbot keeps
+	// generated certificates, keys, and related assets.
+	// This follows the standard Let's Encrypt directory structure.
+	defaultCertBotRootDir = "/etc/letsencrypt"
+
+	// defaultTickerRenewCertificates defines the interval for automatic certificate renewal checks.
+	// Certificates are checked for renewal every 24 hours to ensure they remain valid.
+	defaultTickerRenewCertificates = 24 * time.Hour
+)
+
+const (
+	// SSLFeature indicates that the SSL feature is enabled.
+	SSLFeature = "ssl"
+	// WebEndpointsFeature indicates that the WebEndpoints feature is enabled.
+	// NOTE: the value was "feature" (copy-paste slip); it must be a distinct
+	// flag value so the two features cannot be confused.
+	WebEndpointsFeature = "web_endpoints"
+)
+
+// Gateway represents the main gateway service that orchestrates Nginx configuration
+// management and SSL certificate provisioning.
+type Gateway struct {
+	// Config holds the gateway's configuration settings including domain,
+	// environment, and SSL settings.
+	Config *GatewayConfig
+
+	// Controller manages Nginx configuration generation, template processing,
+	// and server lifecycle operations.
+	Controller *NginxController
+
+	// Certbot handles SSL certificate provisioning and renewal through Let's Encrypt.
+	// This field is nil when SSL is not enabled.
+	Certbot *CertBot
+
+	// Features contains the feature flags enabled for the gateway.
+	Features []string
+}
+
+// NewGateway creates a new Gateway instance with the provided configuration and controller.
+// The Certbot component is initially nil and is initialized only when the
+// SSLFeature flag is present in features.
+func NewGateway(config *GatewayConfig, controller *NginxController, features []string) *Gateway {
+	g := &Gateway{
+		Config:     config,
+		Controller: controller,
+		Certbot:    nil,
+		// NOTE: the features slice was previously dropped here, leaving the
+		// Features field always nil; store it so the flags are inspectable.
+		Features: features,
+	}
+
+	// NOTE: [SSLFeature] indicates that SSL's feature is enabled, configuring SSL certificate management for the
+	// gateway. It sets up Certbot with the gateway's domain configuration and establishes automatic certificate
+	// provisioning and renewal. The renewal callback is configured to reload Nginx when certificates are
+	// renewed, ensuring the server uses the latest certificates without manual intervention.
+	if slices.Contains(features, SSLFeature) {
+		g.Certbot = newCertBot(&Config{
+			RootDir:         defaultCertBotRootDir,
+			RenewedCallback: g.Controller.reload,
+		})
+
+		g.Certbot.Certificates = append(
+			g.Certbot.Certificates,
+			NewDefaultCertificate(defaultCertBotRootDir, g.Config.Domain),
+		)
+	}
+
+	// NOTE: [WebEndpointsFeature] indicates that WebEndpoints' feature is enabled, configuring necessary values to work with
+	// SSL enabled. When Certbot is nil (SSL disabled) this branch is a no-op.
+	if slices.Contains(features, WebEndpointsFeature) {
+		if g.Certbot != nil {
+			if g.Config.WebEndpointsDomain == "" {
+				g.Config.WebEndpointsDomain = g.Config.Domain
+			}
+
+			g.Certbot.Certificates = append(
+				g.Certbot.Certificates,
+				NewWebEndpointsCertificate(
+					g.Config.WebEndpointsDomain,
+					g.Config.WebEndpointsDNSProvider,
+					g.Config.WebEndpointsDNSProviderToken,
+					g.Config.WebEndpointsAcmeDNSURL,
+					g.Config.WebEndpointsAcmeDNSUsername,
+					g.Config.WebEndpointsAcmeDNSPassword,
+					g.Config.WebEndpointsAcmeDNSSubdomain,
+				),
+			)
+		}
+	}
+
+	return g
+}
+
+// Watch enables live monitoring of Nginx configuration template files.
+//
+// This method is typically used in development environments to automatically
+// detect and apply configuration changes without requiring service restarts.
+//
+// The watching mechanism monitors the template directory for file changes
+// and triggers configuration regeneration when modifications are detected.
+func (g *Gateway) Watch() {
+	go g.Controller.watchConfigTemplates()
+}
+
+// Start begins the gateway service with the provided context.
+// This method initializes all configured components and starts the main service loop.
+func (g *Gateway) Start(ctx context.Context) {
+	log.Debug("start was called")
+
+	if g.Certbot != nil {
+		g.Certbot.ensureCertificates()
+		g.Certbot.executeRenewCertificates()
+
+		go g.Certbot.renewCertificates(ctx, defaultTickerRenewCertificates)
+	}
+
+	g.Controller.generateConfigs()
+	g.Controller.start()
+}
+
+func main() {
+	loglevel.UseEnvs()
+
+	ctx := context.Background()
+
+	config, err := loadGatewayConfig()
+	if err != nil {
+		log.Fatalf("failed to load configuration: %v", err)
+	}
+
+	log.WithField("config", config).Info("configuration loaded")
+
+	controller := &NginxController{
+		gatewayConfig: config,
+		rootDir:       defaultNginxRootDir,
+		templatesDir:  defaultNginxTemplateDir,
+	}
+
+	features := []string{}
+
+	if config.EnableAutoSSL {
+		log.WithFields(log.Fields{
+			"provider": config.WebEndpointsDNSProvider,
+			"token":    halfString(config.WebEndpointsDNSProviderToken),
+		}).Info("auto ssl is enabled")
+
+		features = append(features, SSLFeature)
+	}
+
+	if config.WebEndpoints {
+		// NOTE: this message previously read "tunnels info", a leftover from an
+		// older feature name; it now matches the sibling "auto ssl" message.
+		log.WithFields(log.Fields{
+			"provider": config.WebEndpointsDNSProvider,
+			"token":    halfString(config.WebEndpointsDNSProviderToken),
+		}).Info("web endpoints is enabled")
+
+		features = append(features, WebEndpointsFeature)
+	}
+
+	gateway := NewGateway(config, controller, features)
+
+	log.Info("gateway created")
+
+	if envs.IsDevelopment() {
+		log.Info("gateway running in development mode")
+
+		log.Info("watch for nginx files is enabled")
+		gateway.Watch()
+	}
+
+	log.Info("gateway started")
+	gateway.Start(ctx)
+}
diff --git a/gateway/main_test.go b/gateway/main_test.go
new file mode 100644
index 00000000000..52405c2f2ad
--- /dev/null
+++ b/gateway/main_test.go
@@ -0,0 +1,65 @@
+package main
+
+import (
+	"context"
+	"fmt"
+	"net/http"
+	"testing"
+	"time"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/testcontainers/testcontainers-go"
+	"github.com/testcontainers/testcontainers-go/wait"
+)
+
+func TestMain_smoke(t *testing.T) {
+	ctx := context.Background()
+
+	req :=
testcontainers.ContainerRequest{
+		FromDockerfile: testcontainers.FromDockerfile{
+			Context:    "..",
+			Dockerfile: "gateway/Dockerfile",
+			Repo:       "gateway",
+			Tag:        "smoke",
+		},
+		ExposedPorts: []string{"80/tcp"},
+		Env: map[string]string{
+			"SHELLHUB_DOMAIN": "localhost",
+		},
+		WaitingFor: wait.ForListeningPort("80/tcp").
+			WithStartupTimeout(60 * time.Second),
+	}
+
+	container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{
+		ContainerRequest: req,
+		Started:          true,
+	})
+	// NOTE: a failed start leaves container nil; continuing (as the previous
+	// assert.NoError did) would panic in the deferred Terminate below.
+	if err != nil {
+		t.Fatalf("failed to start gateway container: %v", err)
+	}
+
+	defer func() {
+		if err := container.Terminate(ctx); err != nil {
+			t.Errorf("failed to terminate container: %v", err)
+		}
+	}()
+
+	host, err := container.Host(ctx)
+	assert.NoError(t, err)
+
+	port, err := container.MappedPort(ctx, "80")
+	assert.NoError(t, err)
+
+	baseURL := fmt.Sprintf("http://%s:%s", host, port.Port())
+
+	t.Logf("gateway container listening at %s", baseURL)
+
+	healthURL := baseURL + "/healthcheck"
+
+	client := http.Client{Timeout: 5 * time.Second}
+
+	resp, err := client.Get(healthURL)
+	// NOTE: a failed request leaves resp nil; continuing would panic on
+	// the deferred resp.Body.Close().
+	if err != nil {
+		t.Fatalf("failed to request healthcheck endpoint: %v", err)
+	}
+
+	defer resp.Body.Close()
+
+	assert.Equal(t, http.StatusOK, resp.StatusCode)
+}
diff --git a/gateway/mocks/executor.go b/gateway/mocks/executor.go
new file mode 100644
index 00000000000..fc8582945c1
--- /dev/null
+++ b/gateway/mocks/executor.go
@@ -0,0 +1,73 @@
+// Code generated by mockery v2.53.3. DO NOT EDIT.
+
+package mocks
+
+import (
+	exec "os/exec"
+
+	mock "github.com/stretchr/testify/mock"
+)
+
+// Executor is an autogenerated mock type for the Executor type
+type Executor struct {
+	mock.Mock
+}
+
+// Command provides a mock function with given fields: name, arg
+func (_m *Executor) Command(name string, arg ...string) *exec.Cmd {
+	_va := make([]interface{}, len(arg))
+	for _i := range arg {
+		_va[_i] = arg[_i]
+	}
+	var _ca []interface{}
+	_ca = append(_ca, name)
+	_ca = append(_ca, _va...)
+	ret := _m.Called(_ca...)
+
+	if len(ret) == 0 {
+		panic("no return value specified for Command")
+	}
+
+	var r0 *exec.Cmd
+	if rf, ok := ret.Get(0).(func(string, ...string) *exec.Cmd); ok {
+		r0 = rf(name, arg...)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*exec.Cmd)
+		}
+	}
+
+	return r0
+}
+
+// Run provides a mock function with given fields: cmd
+func (_m *Executor) Run(cmd *exec.Cmd) error {
+	ret := _m.Called(cmd)
+
+	if len(ret) == 0 {
+		panic("no return value specified for Run")
+	}
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func(*exec.Cmd) error); ok {
+		r0 = rf(cmd)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// NewExecutor creates a new instance of Executor. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
+// The first argument is typically a *testing.T value.
+func NewExecutor(t interface {
+	mock.TestingT
+	Cleanup(func())
+}) *Executor {
+	mock := &Executor{}
+	mock.Mock.Test(t)
+
+	t.Cleanup(func() { mock.AssertExpectations(t) })
+
+	return mock
+}
diff --git a/gateway/mocks/ticker.go b/gateway/mocks/ticker.go
new file mode 100644
index 00000000000..bcba8bd7f08
--- /dev/null
+++ b/gateway/mocks/ticker.go
@@ -0,0 +1,59 @@
+// Code generated by mockery v2.53.3. DO NOT EDIT.
+ +package mocks + +import ( + context "context" + time "time" + + mock "github.com/stretchr/testify/mock" +) + +// Ticker is an autogenerated mock type for the Ticker type +type Ticker struct { + mock.Mock +} + +// Init provides a mock function with given fields: _a0, _a1 +func (_m *Ticker) Init(_a0 context.Context, _a1 time.Duration) { + _m.Called(_a0, _a1) +} + +// Stop provides a mock function with no fields +func (_m *Ticker) Stop() { + _m.Called() +} + +// Tick provides a mock function with no fields +func (_m *Ticker) Tick() chan time.Time { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Tick") + } + + var r0 chan time.Time + if rf, ok := ret.Get(0).(func() chan time.Time); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(chan time.Time) + } + } + + return r0 +} + +// NewTicker creates a new instance of Ticker. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewTicker(t interface { + mock.TestingT + Cleanup(func()) +}) *Ticker { + mock := &Ticker{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/gateway/nginx.conf b/gateway/nginx.conf deleted file mode 100644 index 8e5d21379d0..00000000000 --- a/gateway/nginx.conf +++ /dev/null @@ -1,59 +0,0 @@ -worker_processes {{ env.Getenv "WORKER_PROCESSES" }}; -worker_rlimit_nofile {{ env.Getenv "MAX_WORKER_OPEN_FILES" }}; - -events { - use epoll; - multi_accept on; - worker_connections {{ env.Getenv "MAX_WORKER_CONNECTIONS" }}; -} - -env SHELLHUB_VERSION; -env SHELLHUB_SSH_PORT; - -http { - aio threads; - aio_write on; - - tcp_nopush on; - tcp_nodelay on; - - sendfile on; - - reset_timedout_connection on; - - gzip on; - gzip_comp_level 1; - gzip_http_version 1.1; - gzip_min_length 256; - gzip_types application/atom+xml application/javascript application/x-javascript application/json application/rss+xml application/vnd.ms-fontobject application/x-font-ttf application/x-web-app-manifest+json application/xhtml+xml application/xml font/opentype image/svg+xml image/x-icon text/css text/javascript text/plain text/x-component; - gzip_proxied any; - gzip_vary on; - - include mime.types; - default_type application/octet-stream; - - client_body_temp_path /var/run/nginx/nginx-client-body; - proxy_temp_path /var/run/nginx/nginx-proxy; - - map $http_x_real_ip $x_real_ip { - default $http_x_real_ip; - "" $remote_addr; - } - - map $http_host $http_port { - default $server_port; - "~^[^\:]+:(?

\d+)$" $p;
-    }
-
-    map $http_x_forwarded_proto $x_forwarded_proto {
-        default $http_x_forwarded_proto;
-        "" $scheme;
-    }
-
-    map $http_x_forwarded_port $x_forwarded_port {
-        default $http_x_forwarded_port;
-        "" $http_port;
-    }
-
-    include /etc/nginx/conf.d/*.conf;
-}
diff --git a/gateway/nginx.go b/gateway/nginx.go
new file mode 100644
index 00000000000..fb08ced072a
--- /dev/null
+++ b/gateway/nginx.go
@@ -0,0 +1,224 @@
+package main
+
+import (
+	"bytes"
+	"errors"
+	"fmt"
+	"io/fs"
+	"log"
+	"os"
+	"os/exec"
+	"os/signal"
+	"path/filepath"
+	"strings"
+	"syscall"
+	"text/template"
+
+	"github.com/fsnotify/fsnotify"
+)
+
+// NginxController manages the configuration and operation of NGINX.
+type NginxController struct {
+	rootDir       string
+	templatesDir  string
+	gatewayConfig *GatewayConfig
+	process       *os.Process
+}
+
+// generateConfigs generates the NGINX configuration files.
+func (nc *NginxController) generateConfigs() {
+	// NOTE: previously hardcoded "/etc/nginx"; use the configurable root so a
+	// non-default rootDir actually takes effect.
+	if err := os.MkdirAll(nc.rootDir, 0o755); err != nil {
+		log.Fatalf("Failed to create nginx directory: %v", err)
+	}
+
+	// Recursively copy all template files maintaining the directory structure
+	err := filepath.Walk(nc.templatesDir, func(path string, info os.FileInfo, err error) error {
+		if err != nil {
+			return err
+		}
+
+		// Skip editor backup files (e.g. "foo.conf~").
+		if strings.HasSuffix(path, "~") {
+			return nil
+		}
+
+		relativePath, err := filepath.Rel(nc.templatesDir, path)
+		if err != nil {
+			return err
+		}
+
+		destPath := filepath.Join(nc.rootDir, relativePath)
+
+		if info.IsDir() {
+			if err := os.MkdirAll(destPath, 0o755); err != nil {
+				return fmt.Errorf("failed to create directory %s: %w", destPath, err)
+			}
+		} else {
+			nc.generateConfig(path, destPath)
+		}
+		return nil
+	})
+	if err != nil {
+		log.Fatalf("Failed to process templates: %v", err)
+	}
+}
+
+// generateConfig processes a template file and writes the result to the destination path.
+func (nc *NginxController) generateConfig(src, dst string) {
+	var err error
+	tmpl := template.New(filepath.Base(src))
+	tmpl, err = tmpl.Funcs(template.FuncMap{
+		"args": templateArgs,
+		"set_upstream": func(host string, port int) (string, error) {
+			return buildUpstreamConfig(tmpl, host, port)
+		},
+	}).ParseFiles(src)
+	if err != nil {
+		log.Fatalf("Failed to parse template file %s: %v", src, err)
+	}
+
+	output := &bytes.Buffer{}
+	err = tmpl.Execute(output, map[string]interface{}{
+		"Config": nc.gatewayConfig,
+	})
+	if err != nil {
+		log.Fatalf("Failed to execute template %s: %v", src, err)
+	}
+
+	err = os.WriteFile(dst, output.Bytes(), 0o644)
+	if err != nil {
+		log.Fatalf("Failed to write config file %s: %v", dst, err)
+	}
+}
+
+// watchConfigTemplates watches for changes in the template directory and regenerates the configs on changes.
+func (nc *NginxController) watchConfigTemplates() {
+	watcher, err := fsnotify.NewWatcher()
+	if err != nil {
+		log.Fatal(err)
+	}
+	defer watcher.Close()
+
+	log.Println("watching config templates")
+
+	done := make(chan bool)
+	go func() {
+		for {
+			select {
+			case event, ok := <-watcher.Events:
+				if !ok {
+					return
+				}
+				if strings.HasSuffix(event.Name, "~") {
+					break
+				}
+				if event.Has(fsnotify.Write) {
+					log.Println("GatewayConfig file modified:", event.Name)
+					nc.generateConfigs()
+					nc.reload()
+					break
+				}
+			case err, ok := <-watcher.Errors:
+				if !ok {
+					return
+				}
+				log.Println("Watcher error:", err)
+			}
+		}
+	}()
+
+	err = filepath.WalkDir(nc.templatesDir, func(path string, d fs.DirEntry, err error) error {
+		if err != nil {
+			return err
+		}
+		if d.IsDir() {
+			err = watcher.Add(path)
+			if err != nil {
+				log.Println("ERROR: Failed to add directory to watcher:", err)
+			}
+		}
+		return nil
+	})
+	if err != nil {
+		log.Fatal(err)
+	}
+	// Block forever; the watcher goroutine runs for the process lifetime.
+	<-done
+}
+
+// reload sends a SIGHUP signal to the NGINX process to reload the configuration.
+func (nc *NginxController) reload() {
+	if nc.process == nil {
+		return
+	}
+
+	log.Println("Reloading nginx process")
+
+	if err := nc.process.Signal(syscall.SIGHUP); err != nil {
+		// NOTE: previously the error itself was discarded; include it so the
+		// failure cause is visible in the logs.
+		log.Fatalf("Failed to reload NGINX: %v", err)
+	}
+}
+
+// start initializes and starts the NGINX process.
+func (nc *NginxController) start() {
+	log.Println("Starting NGINX")
+
+	cmd := exec.Command(
+		"/usr/local/openresty/nginx/sbin/nginx",
+		// Keep the config path consistent with the configurable rootDir used
+		// by generateConfigs instead of hardcoding "/etc/nginx/nginx.conf".
+		"-c", filepath.Join(nc.rootDir, "nginx.conf"),
+	)
+	cmd.Stdout, cmd.Stderr = os.Stdout, os.Stderr
+	cmd.SysProcAttr = &syscall.SysProcAttr{Pdeathsig: syscall.SIGTERM}
+
+	if err := cmd.Start(); err != nil {
+		log.Fatal(err)
+	}
+
+	nc.process = cmd.Process
+
+	// Goroutine to handle termination signals and gracefully stop NGINX.
+	// Installed only after Start so cmd.Process is guaranteed non-nil when
+	// a signal arrives (previously it could be dereferenced before Start).
+	go func() {
+		c := make(chan os.Signal, 1)
+		signal.Notify(c, syscall.SIGINT, syscall.SIGTERM)
+		<-c
+
+		if err := cmd.Process.Signal(syscall.SIGTERM); err != nil {
+			log.Fatal(err)
+		}
+	}()
+
+	if err := cmd.Wait(); err != nil {
+		log.Fatal(err)
+	}
+}
+
+// templateArgs function to be used for argument handling in templates.
+func templateArgs(pairs ...any) (map[string]any, error) {
+	if len(pairs)%2 != 0 {
+		return nil, errors.New("misaligned args")
+	}
+
+	argsMap := make(map[string]any)
+	for i := 0; i < len(pairs); i += 2 {
+		if key, ok := pairs[i].(string); ok {
+			argsMap[key] = pairs[i+1]
+		} else {
+			return nil, fmt.Errorf("key must be a string, got %T", pairs[i])
+		}
+	}
+	return argsMap, nil
+}
+
+// buildUpstreamConfig renders the UPSTREAM_CONFIG sub-template with the given
+// host and port, returning the rendered text.
+func buildUpstreamConfig(tmpl *template.Template, host string, port int) (string, error) {
+	// NOTE: Template.Lookup returns nil when the named template is not
+	// defined; previously Execute was called on that nil result, panicking
+	// for any template set missing UPSTREAM_CONFIG.
+	upstream := tmpl.Lookup("UPSTREAM_CONFIG")
+	if upstream == nil {
+		return "", errors.New("template UPSTREAM_CONFIG is not defined")
+	}
+
+	output := &bytes.Buffer{}
+	err := upstream.Execute(
+		output,
+		map[string]interface{}{
+			"Host": host,
+			"Port": port,
+		},
+	)
+	if err != nil {
+		return "", err
+	}
+
+	return output.String(), nil
+}
diff --git a/gateway/nginx/conf.d/shellhub.conf b/gateway/nginx/conf.d/shellhub.conf
new file mode 100644
index 00000000000..ae00989564e
--- /dev/null
+++ b/gateway/nginx/conf.d/shellhub.conf
@@ -0,0 +1,797 @@
+{{ $cfg := .Config }}
+
+lua_shared_dict dns_cache 128k;
+
+limit_req_zone $binary_remote_addr zone=api_limit:{{ $cfg.APIRateLimitZoneSize }} rate={{ $cfg.APIRateLimit }};
+limit_req_status 429;
+
+{{ $api_burst := "" }}
+{{ if $cfg.APIBurstSize }}
+  {{ $api_burst = printf "burst=%s" $cfg.APIBurstSize }}
+{{ end }}
+
+{{ $api_delay := "nodelay" }}
+{{ if ne $cfg.APIBurstDelay "nodelay" }}
+  {{ $api_delay = printf "delay=%s" $cfg.APIBurstDelay }}
+{{ end }}
+
+server {
+    {{ if and ($cfg.EnableAutoSSL) (ne $cfg.Env "development") -}}
+    listen 443 reuseport ssl{{ if $cfg.EnableProxyProtocol }} proxy_protocol{{ end }} backlog={{ $cfg.BacklogSize }};
+    ssl_certificate /etc/letsencrypt/live/{{ $cfg.Domain }}/fullchain.pem;
+    ssl_certificate_key /etc/letsencrypt/live/{{ $cfg.Domain }}/privkey.pem;
+
+    ssl_session_cache shared:le_nginx_SSL:10m;
+    ssl_session_timeout 10m;
+    ssl_session_tickets off;
+
+    ssl_dhparam /etc/shellhub-gateway/dhparam.pem;
+
+    ssl_protocols TLSv1.2 TLSv1.3;
+    ssl_prefer_server_ciphers off;
+
+    ssl_ciphers
"ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384"; + {{ else -}} + listen 80 reuseport{{ if $cfg.EnableProxyProtocol }} proxy_protocol{{ end }} backlog={{ $cfg.BacklogSize }}; + {{- end }} + {{ if $cfg.EnableProxyProtocol }} + set_real_ip_from ::/0; + real_ip_header proxy_protocol; + {{ end }} + server_name {{ $cfg.Domain }}; + server_tokens off; + resolver 127.0.0.11 ipv6=off; + + {{ if not $cfg.EnableCloud }} + + location = /api/setup { + set $upstream api:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + proxy_http_version 1.1; + proxy_set_header Connection $connection_upgrade; + proxy_pass http://$upstream; + } + + {{ end }} + + location /healthcheck { + return 200; + } + + location / { + set $upstream ui:8080; + + add_header Cache-Control "no-cache, no-store"; + add_header Pragma "no-cache"; + + proxy_pass http://$upstream; + proxy_set_header Connection 'upgrade'; + proxy_set_header Host $host; + proxy_set_header Upgrade $http_upgrade; + proxy_http_version 1.1; + proxy_cache_bypass $http_upgrade; + proxy_redirect off; + } + + location ^~ /.well-known/acme-challenge/ { + default_type "text/plain"; + root /etc/letsencrypt; + } + + location /api { + set $upstream api:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + auth_request /auth; + auth_request_set $tenant_id $upstream_http_x_tenant_id; + auth_request_set $username $upstream_http_x_username; + auth_request_set $id $upstream_http_x_id; + auth_request_set $api_key $upstream_http_x_api_key; + auth_request_set $role $upstream_http_x_role; + auth_request_set $admin $upstream_http_x_admin; + error_page 500 =401 /auth; + proxy_http_version 1.1; + proxy_set_header Connection $connection_upgrade; + proxy_set_header X-Forwarded-Host $host; + 
proxy_set_header X-Forwarded-Port $x_forwarded_port; + proxy_set_header X-Forwarded-Proto $x_forwarded_proto; + proxy_set_header X-Api-Key $api_key; + proxy_set_header X-ID $id; + proxy_set_header X-Request-ID $request_id; + proxy_set_header X-Role $role; + proxy_set_header X-Tenant-ID $tenant_id; + proxy_set_header X-Username $username; + proxy_set_header X-Admin $admin; + proxy_pass http://$upstream; + } + + location /api/user { + set $upstream api:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + {{/* + The route for deleting users is available only in cloud instances. + Community users must use the CLI, while enterprise users have access + to the admin dashboard. + */}} + if ($request_method = DELETE) { + set $upstream cloud:8080; + } + + auth_request /auth; + auth_request_set $tenant_id $upstream_http_x_tenant_id; + auth_request_set $username $upstream_http_x_username; + auth_request_set $id $upstream_http_x_id; + auth_request_set $api_key $upstream_http_x_api_key; + auth_request_set $role $upstream_http_x_role; + error_page 500 =401 /auth; + proxy_http_version 1.1; + proxy_set_header Connection $connection_upgrade; + proxy_set_header X-Api-Key $api_key; + proxy_set_header X-ID $id; + proxy_set_header X-Request-ID $request_id; + proxy_set_header X-Role $role; + proxy_set_header X-Tenant-ID $tenant_id; + proxy_set_header X-Username $username; + proxy_pass http://$upstream; + } + + location ~^/api/namespaces/[^/]+/members/accept-invite$ { + set $upstream cloud:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + auth_request /auth; + auth_request_set $tenant_id $upstream_http_x_tenant_id; + auth_request_set $username $upstream_http_x_username; + auth_request_set $id $upstream_http_x_id; + auth_request_set $api_key $upstream_http_x_api_key; + auth_request_set $role $upstream_http_x_role; + error_page 500 =401 /auth; + proxy_http_version 1.1; + proxy_set_header 
Connection $connection_upgrade; + proxy_set_header X-Forwarded-Host $host; + proxy_set_header X-Forwarded-Port $x_forwarded_port; + proxy_set_header X-Forwarded-Proto $x_forwarded_proto; + proxy_set_header X-Api-Key $api_key; + proxy_set_header X-ID $id; + proxy_set_header X-Request-ID $request_id; + proxy_set_header X-Role $role; + proxy_set_header X-Tenant-ID $tenant_id; + proxy_set_header X-Username $username; + proxy_pass http://$upstream; + } + + location ~^/api/namespaces/[^/]+/members/[^/]+/accept-invite$ { + set $upstream cloud:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + auth_request off; + + proxy_http_version 1.1; + proxy_set_header Connection $connection_upgrade; + proxy_set_header X-Forwarded-Host $host; + proxy_set_header X-Forwarded-Port $x_forwarded_port; + proxy_set_header X-Forwarded-Proto $x_forwarded_proto; + proxy_set_header X-Request-ID $request_id; + + proxy_pass http://$upstream; + } + + {{ if $cfg.EnableEnterprise -}} + location ~^/api/namespaces/[^/]+/members/invites$ { + set $upstream cloud:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + auth_request /auth; + auth_request_set $tenant_id $upstream_http_x_tenant_id; + auth_request_set $username $upstream_http_x_username; + auth_request_set $id $upstream_http_x_id; + auth_request_set $api_key $upstream_http_x_api_key; + auth_request_set $role $upstream_http_x_role; + error_page 500 =401 /auth; + proxy_http_version 1.1; + proxy_set_header Connection $connection_upgrade; + proxy_set_header X-Forwarded-Host $host; + proxy_set_header X-Forwarded-Port $x_forwarded_port; + proxy_set_header X-Forwarded-Proto $x_forwarded_proto; + proxy_set_header X-Api-Key $api_key; + proxy_set_header X-ID $id; + proxy_set_header X-Request-ID $request_id; + proxy_set_header X-Role $role; + proxy_set_header X-Tenant-ID $tenant_id; + proxy_set_header X-Username $username; + proxy_pass http://$upstream; + } + {{ 
end -}} + + {{ if $cfg.EnableEnterprise -}} + location ~ ^/api/namespaces/([^/]+)/support$ { + set $upstream cloud:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + auth_request /auth; + auth_request_set $tenant_id $upstream_http_x_tenant_id; + auth_request_set $username $upstream_http_x_username; + auth_request_set $id $upstream_http_x_id; + auth_request_set $role $upstream_http_x_role; + error_page 500 =401 /auth; + proxy_http_version 1.1; + proxy_set_header X-ID $id; + proxy_set_header X-Role $role; + proxy_set_header X-Tenant-ID $tenant_id; + proxy_set_header X-Username $username; + proxy_pass http://$upstream; + } + {{ end -}} + + {{ if $cfg.EnableEnterprise -}} + location ~ ^/api/devices/([^/]+)/tunnels(/.*)?$ { + set $upstream cloud:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + auth_request /auth; + auth_request_set $tenant_id $upstream_http_x_tenant_id; + auth_request_set $username $upstream_http_x_username; + auth_request_set $id $upstream_http_x_id; + auth_request_set $role $upstream_http_x_role; + error_page 500 =401 /auth; + proxy_http_version 1.1; + proxy_set_header X-ID $id; + proxy_set_header X-Role $role; + proxy_set_header X-Tenant-ID $tenant_id; + proxy_set_header X-Username $username; + proxy_pass http://$upstream; + } + + location ~ ^/api/web-endpoints(/.*)?$ { + set $upstream cloud:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + auth_request /auth; + auth_request_set $tenant_id $upstream_http_x_tenant_id; + auth_request_set $username $upstream_http_x_username; + auth_request_set $id $upstream_http_x_id; + auth_request_set $role $upstream_http_x_role; + error_page 500 =401 /auth; + proxy_http_version 1.1; + proxy_set_header X-ID $id; + proxy_set_header X-Role $role; + proxy_set_header X-Tenant-ID $tenant_id; + proxy_set_header X-Username $username; + proxy_pass http://$upstream; + } + {{ end 
-}} + + location ~ ^/(install.sh|kickstart.sh)$ { + set $upstream api:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + rewrite ^/(.*)$ /api/install break; + proxy_set_header X-Forwarded-Host $host; + proxy_set_header X-Forwarded-Port $x_forwarded_port; + proxy_set_header X-Forwarded-Proto $x_forwarded_proto; + proxy_pass http://$upstream; + } + + location /api/auth/user { + set $upstream api:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + auth_request /auth/skip; + auth_request_set $tenant_id $upstream_http_x_tenant_id; + auth_request_set $username $upstream_http_x_username; + auth_request_set $id $upstream_http_x_id; + auth_request_set $role $upstream_http_x_role; + error_page 500 =401 /auth; + proxy_set_header X-ID $id; + proxy_set_header X-Request-ID $request_id; + proxy_set_header X-Role $role; + proxy_set_header X-Tenant-ID $tenant_id; + proxy_set_header X-Username $username; + proxy_pass http://$upstream; + } + + {{ if eq $cfg.Env "development" -}} + location /openapi { + set $upstream openapi:8080; + + proxy_pass http://$upstream; + } + + location /openapi/mock { + set $upstream openapi:8080; + + rewrite ^/openapi/mock/?(.*)$ /$1 break; + proxy_pass http://$upstream; + } + + location /openapi/proxy { + set $upstream openapi:8080; + + rewrite ^/openapi/proxy/?(.*)$ /$1 break; + proxy_pass http://$upstream; + } + {{- end }} + + location ~ ^/admin$ { + rewrite ^/admin/?$ /admin/ permanent; + } + + {{ if $cfg.EnableEnterprise -}} + location /admin/api { + set $upstream cloud:8080; + + auth_request /auth; + auth_request_set $tenant_id $upstream_http_x_tenant_id; + auth_request_set $username $upstream_http_x_username; + auth_request_set $id $upstream_http_x_id; + auth_request_set $api_key $upstream_http_x_api_key; + auth_request_set $role $upstream_http_x_role; + auth_request_set $admin $upstream_http_x_admin; + error_page 500 =401 /auth; + proxy_http_version 
1.1; + proxy_set_header Connection $connection_upgrade; + proxy_set_header X-Forwarded-Host $host; + proxy_set_header X-Forwarded-Port $x_forwarded_port; + proxy_set_header X-Forwarded-Proto $x_forwarded_proto; + proxy_set_header X-Api-Key $api_key; + proxy_set_header X-Request-ID $request_id; + proxy_set_header X-Role $role; + proxy_set_header X-Username $username; + proxy_set_header X-Admin $admin; + + rewrite ^/admin/(.*)$ /admin/$1 break; + proxy_pass http://$upstream; + } + {{ end -}} + + location /ssh/connection { + set $upstream ssh:8080; + + auth_request /auth; + auth_request_set $tenant_id $upstream_http_x_tenant_id; + auth_request_set $device_uid $upstream_http_x_device_uid; + proxy_pass http://$upstream; + proxy_set_header Connection $connection_upgrade; + proxy_set_header Host $host; + proxy_set_header Upgrade $http_upgrade; + {{ if $cfg.EnableProxyProtocol -}} + proxy_set_header X-Real-IP $proxy_protocol_addr; + {{ else -}} + proxy_set_header X-Real-IP $x_real_ip; + {{ end -}} + proxy_set_header X-Device-UID $device_uid; + proxy_set_header X-Tenant-ID $tenant_id; + proxy_set_header X-Request-ID $request_id; + proxy_http_version 1.1; + proxy_cache_bypass $http_upgrade; + proxy_redirect off; + } + + location /ssh/revdial { + set $upstream ssh:8080; + + proxy_pass http://$upstream; + proxy_set_header Connection $connection_upgrade; + proxy_set_header Host $host; + proxy_set_header Upgrade $http_upgrade; + {{ if $cfg.EnableProxyProtocol -}} + proxy_set_header X-Real-IP $proxy_protocol_addr; + {{ else -}} + proxy_set_header X-Real-IP $x_real_ip; + {{ end -}} + proxy_set_header X-Request-ID $request_id; + proxy_http_version 1.1; + proxy_cache_bypass $http_upgrade; + proxy_redirect off; + } + + location /agent/connection { + set $upstream ssh:8080; + + auth_request /auth; + auth_request_set $tenant_id $upstream_http_x_tenant_id; + auth_request_set $device_uid $upstream_http_x_device_uid; + proxy_pass http://$upstream; + proxy_set_header Connection 
$connection_upgrade; + proxy_set_header Host $host; + proxy_set_header Upgrade $http_upgrade; + {{ if $cfg.EnableProxyProtocol -}} + proxy_set_header X-Real-IP $proxy_protocol_addr; + {{ else -}} + proxy_set_header X-Real-IP $x_real_ip; + {{ end -}} + proxy_set_header X-Device-UID $device_uid; + proxy_set_header X-Tenant-ID $tenant_id; + proxy_set_header X-Request-ID $request_id; + proxy_http_version 1.1; + proxy_cache_bypass $http_upgrade; + proxy_redirect off; + } + + location /ssh/auth { + set $upstream api:8080; + + auth_request /auth; + auth_request_set $device_uid $upstream_http_x_device_uid; + error_page 500 =401 /auth; + proxy_http_version 1.1; + proxy_set_header Connection $connection_upgrade; + proxy_pass http://$upstream; + proxy_set_header X-Device-UID $device_uid; + proxy_set_header X-Request-ID $request_id; + } + + {{ if $cfg.EnableCloud -}} + location /api/users/invitations { + set $upstream cloud:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + auth_request /auth; + auth_request_set $tenant_id $upstream_http_x_tenant_id; + auth_request_set $username $upstream_http_x_username; + auth_request_set $id $upstream_http_x_id; + auth_request_set $api_key $upstream_http_x_api_key; + auth_request_set $role $upstream_http_x_role; + error_page 500 =401 /auth; + proxy_http_version 1.1; + proxy_set_header Connection $connection_upgrade; + proxy_set_header X-Api-Key $api_key; + proxy_set_header X-ID $id; + proxy_set_header X-Request-ID $request_id; + proxy_set_header X-Role $role; + proxy_set_header X-Tenant-ID $tenant_id; + proxy_set_header X-Username $username; + proxy_pass http://$upstream; + } + + location ~^/api/namespaces/[^/]+/invitations(/.*)?$ { + set $upstream cloud:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + auth_request /auth; + auth_request_set $tenant_id $upstream_http_x_tenant_id; + auth_request_set $username $upstream_http_x_username; + 
auth_request_set $id $upstream_http_x_id; + auth_request_set $api_key $upstream_http_x_api_key; + auth_request_set $role $upstream_http_x_role; + error_page 500 =401 /auth; + proxy_http_version 1.1; + proxy_set_header Connection $connection_upgrade; + proxy_set_header X-Forwarded-Host $host; + proxy_set_header X-Forwarded-Port $x_forwarded_port; + proxy_set_header X-Forwarded-Proto $x_forwarded_proto; + proxy_set_header X-Api-Key $api_key; + proxy_set_header X-ID $id; + proxy_set_header X-Request-ID $request_id; + proxy_set_header X-Role $role; + proxy_set_header X-Tenant-ID $tenant_id; + proxy_set_header X-Username $username; + proxy_pass http://$upstream; + } + {{ end -}} + + {{ if $cfg.EnableCloud -}} + location /api/announcements { + set $upstream cloud:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + proxy_pass http://$upstream; + } + {{ end -}} + + {{ if $cfg.EnableCloud -}} + location /api/billing { + set $upstream cloud:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + auth_request /auth; + auth_request_set $tenant_id $upstream_http_x_tenant_id; + auth_request_set $username $upstream_http_x_username; + auth_request_set $id $upstream_http_x_id; + auth_request_set $role $upstream_http_x_role; + error_page 500 =401 /auth; + proxy_http_version 1.1; + proxy_set_header X-ID $id; + proxy_set_header X-Role $role; + proxy_set_header X-Tenant-ID $tenant_id; + proxy_set_header X-Username $username; + proxy_pass http://$upstream; + } + {{ end -}} + + {{ if $cfg.EnableCloud -}} + location /api/connector { + set $upstream cloud:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + auth_request /auth; + auth_request_set $tenant_id $upstream_http_x_tenant_id; + auth_request_set $username $upstream_http_x_username; + auth_request_set $id $upstream_http_x_id; + auth_request_set $role $upstream_http_x_role; + error_page 500 =401 
/auth; + proxy_set_header X-Tenant-ID $tenant_id; + proxy_set_header X-Username $username; + proxy_set_header X-ID $id; + proxy_set_header X-Role $role; + proxy_pass http://$upstream; + } + {{ end -}} + + {{ if $cfg.EnableEnterprise -}} + location /api/firewall { + set $upstream cloud:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + auth_request /auth; + auth_request_set $tenant_id $upstream_http_x_tenant_id; + auth_request_set $username $upstream_http_x_username; + auth_request_set $id $upstream_http_x_id; + auth_request_set $role $upstream_http_x_role; + error_page 500 =401 /auth; + proxy_http_version 1.1; + proxy_set_header X-ID $id; + proxy_set_header X-Role $role; + proxy_set_header X-Tenant-ID $tenant_id; + proxy_set_header X-Username $username; + proxy_pass http://$upstream; + } + {{ end -}} + + {{ if $cfg.EnableEnterprise -}} + location /api/user/saml/auth { + set $upstream cloud:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + auth_request off; + proxy_set_header X-Real-IP $x_real_ip; + proxy_set_header X-Forwarded-Proto $x_forwarded_proto; + proxy_set_header X-Forwarded-Host $host; + proxy_pass http://$upstream; + } + + location /api/user/mfa { + set $upstream cloud:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + auth_request /auth; + auth_request_set $id $upstream_http_x_id; + error_page 500 =401 /auth; + proxy_set_header X-Real-IP $x_real_ip; + proxy_set_header X-Forwarded-Host $host; + proxy_set_header X-ID $id; + proxy_pass http://$upstream; + } + + location ~^/api/user/mfa/(auth|recover|reset)(?:/.*)?$ { + set $upstream cloud:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + auth_request off; + proxy_set_header X-Real-IP $x_real_ip; + proxy_set_header X-Forwarded-Host $host; + proxy_pass http://$upstream; + } + + location /api/register { + set 
$upstream cloud:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + proxy_set_header X-Forwarded-Host $host; + proxy_pass http://$upstream; + } + {{ end -}} + + {{ if $cfg.EnableCloud -}} + location /api/user/recover_password { + set $upstream cloud:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + proxy_set_header X-Forwarded-Host $host; + proxy_pass http://$upstream; + } + {{ end -}} + + {{ if $cfg.EnableCloud -}} + location ~* /api/user/(.*)/update_password { + set $upstream cloud:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + proxy_pass http://$upstream; + } + {{ end -}} + + {{ if $cfg.EnableCloud -}} + location /api/user/resend_email { + set $upstream cloud:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + proxy_set_header X-Forwarded-Host $host; + proxy_pass http://$upstream; + } + {{ end -}} + + {{ if $cfg.EnableCloud -}} + location /api/user/validation_account { + set $upstream cloud:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + proxy_pass http://$upstream; + } + {{ end -}} + + {{ if $cfg.EnableEnterprise -}} + location ~* /api/sessions/(.*)/records/(.*) { + set $upstream cloud:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + auth_request /auth; + auth_request_set $tenant_id $upstream_http_x_tenant_id; + auth_request_set $username $upstream_http_x_username; + auth_request_set $role $upstream_http_x_role; + error_page 500 =401 /auth; + proxy_http_version 1.1; + proxy_set_header X-Role $role; + proxy_set_header X-Tenant-ID $tenant_id; + proxy_set_header X-Username $username; + proxy_pass http://$upstream; + } + {{ end -}} + + location ~* /api/sessions/(.*)/close { + set $upstream ssh:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end 
}} {{ $api_delay }}; + + auth_request /auth; + auth_request_set $tenant_id $upstream_http_x_tenant_id; + auth_request_set $role $upstream_http_x_role; + error_page 500 =401 /auth; + proxy_set_header X-Role $role; + proxy_set_header X-Tenant-ID $tenant_id; + proxy_pass http://$upstream; + } + + location /api/devices/auth { + set $upstream api:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + auth_request off; + {{ if $cfg.EnableProxyProtocol -}} + proxy_set_header X-Real-IP $proxy_protocol_addr; + {{ else -}} + proxy_set_header X-Real-IP $x_real_ip; + {{ end -}} + proxy_http_version 1.1; + proxy_set_header Connection $connection_upgrade; + proxy_pass http://$upstream; + } + + location /api/login { + set $upstream api:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + auth_request off; + proxy_pass http://$upstream; + } + + location /api/webhook-billing { + set $upstream cloud:8080; + limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + auth_request off; + proxy_pass http://$upstream; + } + + location /auth { + set $upstream_auth api:8080; + + internal; + rewrite ^/(.*)$ /internal/$1 break; + proxy_http_version 1.1; + proxy_pass http://$upstream_auth; + } + + location /auth/skip { + set $upstream_auth api:8080; + + internal; + rewrite ^/auth/(.*)$ /internal/auth?args=$1 break; + proxy_http_version 1.1; + proxy_pass http://$upstream_auth; + } + + location /ws { + set $upstream ssh:8080; + + proxy_pass http://$upstream; + proxy_set_header Connection $connection_upgrade; + proxy_set_header Host $host; + proxy_set_header Upgrade $http_upgrade; + {{ if $cfg.EnableProxyProtocol -}} + proxy_set_header X-Real-IP $proxy_protocol_addr; + {{ else -}} + proxy_set_header X-Real-IP $x_real_ip; + {{ end -}} + proxy_http_version 1.1; + proxy_cache_bypass $http_upgrade; + proxy_redirect off; + } + + location /info { + set $upstream api:8080; + 
limit_req zone=api_limit{{ if $api_burst }} {{ $api_burst }}{{ end }} {{ $api_delay }}; + + proxy_set_header Connection $connection_upgrade; + proxy_set_header X-Forwarded-Host $host; + proxy_set_header X-Forwarded-Port $x_forwarded_port; + proxy_http_version 1.1; + proxy_pass http://$upstream; + rewrite ^/(.*)$ /api/info break; + } + + location = /nginx_status { + stub_status; + allow 127.0.0.1; + deny all; + } +} + +{{ if and $cfg.EnableEnterprise $cfg.WebEndpoints -}} +{{ $DOMAIN := or $cfg.WebEndpointsDomain $cfg.Domain }} +server { + {{ if and ($cfg.EnableAutoSSL) (ne $cfg.Env "development") -}} + listen 443; + ssl_certificate "/etc/letsencrypt/live/*.{{ $DOMAIN }}/fullchain.pem"; + ssl_certificate_key "/etc/letsencrypt/live/*.{{ $DOMAIN }}/privkey.pem"; + + ssl_session_cache shared:le_nginx_SSL:10m; + ssl_session_timeout 10m; + ssl_session_tickets off; + + ssl_dhparam /etc/shellhub-gateway/dhparam.pem; + + ssl_protocols TLSv1.2 TLSv1.3; + ssl_prefer_server_ciphers off; + + ssl_ciphers "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384"; + {{ else -}} + listen 80; + {{- end }} + + server_name "~^(?
address>
[a-f0-9]{32})\.{{ $DOMAIN }}$"; + resolver 127.0.0.11 ipv6=off; + + location ~ ^/(?<path>.*) { + set $upstream ssh:8080; + + rewrite ^/(.*)$ /http/proxy break; + proxy_set_header X-Request-ID $request_id; + proxy_set_header X-Address $address; + proxy_set_header X-Path /$path$is_args$args; + + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection $connection_upgrade; + proxy_buffering off; + + proxy_pass http://$upstream; + } +} +{{ end }} + +{{ if and ($cfg.EnableAutoSSL) (ne $cfg.Env "development") }} +server { + listen 80 default_server{{ if $cfg.EnableProxyProtocol }} proxy_protocol{{ end }}; + + return 308 https://$host$request_uri; +} +{{- end }} + +{{/* + This template is used to set the values of `upstream_host` and `upstream_port` + dynamically within the request context (`ngx.ctx`). These values are then used by the upstream router + to direct traffic to the appropriate backend server. To use this template, the `set_upstream` function + must be called, which handles the internal invocation of this template. 
+*/}} diff --git a/gateway/nginx/lua/balancer.lua b/gateway/nginx/lua/balancer.lua new file mode 100644 index 00000000000..34431c830b3 --- /dev/null +++ b/gateway/nginx/lua/balancer.lua @@ -0,0 +1,115 @@ +{{ $cfg := .Config }} + +local ngx_balancer = require("ngx.balancer") +local resolver = require("resty.dns.resolver") +local dns_cache = ngx.shared.dns_cache +local hostnames = {} + +local _M = {} + +local function resolve_dns(hostname) + local r, err = resolver:new{ + nameservers = {"127.0.0.11"}, + retrans = 5, + timeout = 2000, + } + + if not r then + ngx.log(ngx.ERR, "failed to instantiate the resolver: ", err) + return + end + + local answers, err = r:query(hostname, nil, {}) + if not answers then + ngx.log(ngx.ERR, "failed to query the DNS server for ", hostname, ": ", err) + return + end + + if answers.errcode then + ngx.log(ngx.ERR, "server returned error code for ", hostname, ": ", answers.errcode, ": ", answers.errstr) + return + end + + ngx.log(ngx.INFO, "DNS resolution for ", hostname, " returned ", #answers, " answers") + + for _, ans in ipairs(answers) do + if ans.address then + local ok, err = dns_cache:set(hostname, ans.address, 45) + if not ok then + ngx.log(ngx.ERR, "Failed to set ", hostname, " in cache: ", err) + return + end + + ngx.log(ngx.INFO, "Resolved ", hostname, " to ", ans.address) + return ans.address + end + end +end + +local function resolve_all_dns() + ngx.log(ngx.INFO, "Resolving DNS for ", #hostnames, " hostnames") + + for _, hostname in ipairs(hostnames) do + resolve_dns(hostname) + end +end + +local function contains_hostname(value) + for _, v in ipairs(hostnames) do + if v == value then + return true + end + end + return false +end + +function _M.init_worker() + local ok, err = ngx.timer.every(60, resolve_all_dns) + if not ok then + ngx.log(ngx.ERR, "failed to create the timer: ", err) + end +end + +function _M.balance() + local hostname = ngx.ctx.upstream_host + if not hostname then + ngx.log(ngx.ERR, "Missing 'upstream_host' 
variable") + return ngx.exit(500) + end + + local port = ngx.ctx.upstream_port + if not port then + ngx.log(ngx.ERR, "Missing 'upstream_port' variable") + return ngx.exit(500) + end + + local ip = dns_cache:get(hostname) + if not ip then + ngx.log(ngx.ERR, "DNS resolution for ", hostname, " not found in cache") + return ngx.exit(500) + end + + ngx_balancer.set_more_tries(1) + + local ok, err = ngx_balancer.set_current_peer(ip, port) + if not ok then + ngx.log(ngx.ERR, "Failed to set the current peer: ", err) + return ngx.exit(500) + end +end + +function _M.set_peer(host, port) + ngx.ctx.upstream_host = host + ngx.ctx.upstream_port = port + + if not contains_hostname(host) then + table.insert(hostnames, host) + end + + local ip = dns_cache:get(host) + if not ip then + resolve_dns(host) + end +end + +return _M \ No newline at end of file diff --git a/gateway/nginx/nginx.conf b/gateway/nginx/nginx.conf new file mode 100644 index 00000000000..3e3423624e6 --- /dev/null +++ b/gateway/nginx/nginx.conf @@ -0,0 +1,89 @@ +{{ $cfg := .Config }} + +daemon off; + +worker_processes {{ $cfg.WorkerProcesses }}; +worker_rlimit_nofile {{ $cfg.MaxWorkerOpenFiles }}; + +events { + use epoll; + multi_accept on; + worker_connections {{ $cfg.MaxWorkerConnections }}; +} + +http { + aio threads; + aio_write on; + + tcp_nopush on; + tcp_nodelay on; + + sendfile on; + + reset_timedout_connection on; + + gzip on; + gzip_comp_level 1; + gzip_http_version 1.1; + gzip_min_length 256; + gzip_types application/atom+xml application/javascript application/x-javascript application/json application/rss+xml application/vnd.ms-fontobject application/x-font-ttf application/x-web-app-manifest+json application/xhtml+xml application/xml font/opentype image/svg+xml image/x-icon text/css text/javascript text/plain text/x-component; + gzip_proxied any; + gzip_vary on; + + include /usr/local/openresty/nginx/conf/mime.types; + default_type application/octet-stream; + + client_body_temp_path 
/var/run/openresty/nginx-client-body; + proxy_temp_path /var/run/openresty/nginx-proxy; + + lua_package_path "/etc/nginx/lua/?.lua;;"; + + map $http_x_real_ip $x_real_ip { + default $http_x_real_ip; + "" $remote_addr; + } + + map $http_host $http_port { + default $server_port; + "~^[^\:]+:(?
p>
\d+)$" $p; + } + + map $http_x_forwarded_proto $x_forwarded_proto { + default $http_x_forwarded_proto; + "" $scheme; + } + + map $http_x_forwarded_port $x_forwarded_port { + default $http_x_forwarded_port; + "" $http_port; + } + + map $http_upgrade $connection_upgrade { + default upgrade; + '' ''; + } + + log_format nginxlog_json escape=json '{ "timestamp": "$time_iso8601", ' + '"remote_addr": "$remote_addr", ' + '"body_bytes_sent": $body_bytes_sent, ' + '"request_time": $request_time, ' + '"response_status": $status, ' + '"request": "$request", ' + '"request_method": "$request_method", ' + '"host": "$host",' + '"upstream_addr": "$upstream_addr",' + '"http_x_forwarded_for": "$http_x_forwarded_for",' + '"http_referrer": "$http_referer", ' + '"http_user_agent": "$http_user_agent", ' + '"http_version": "$server_protocol", ' + '"request_id": "$request_id", ' + '"nginx_access": true }'; + + {{ if $cfg.EnableAccessLogs }} + access_log /dev/stdout nginxlog_json; + {{ else }} + access_log off; + {{ end }} + + include /etc/nginx/conf.d/*.conf; +} diff --git a/gateway/utils.go b/gateway/utils.go new file mode 100644 index 00000000000..f3f69450efe --- /dev/null +++ b/gateway/utils.go @@ -0,0 +1,46 @@ +package main + +import ( + "log" + "os" + "path" + "strconv" + "strings" + + "golang.org/x/sys/unix" +) + +// rlimitMaxNumFiles returns the maximum number of open files allowed by the system. +func rlimitMaxNumFiles() int { + var rLimit unix.Rlimit + if err := unix.Getrlimit(unix.RLIMIT_NOFILE, &rLimit); err != nil { + log.Fatal(err) + } + return int(rLimit.Max) +} + +// getSysctl retrieves the value of a given sysctl parameter. 
+func getSysctl(sysctl string) int { + data, err := os.ReadFile( + path.Join("/proc/sys/", strings.ReplaceAll(sysctl, ".", "/")), + ) + if err != nil { + log.Println(err) + return -1 + } + + value, err := strconv.Atoi(strings.Trim(string(data), " \n")) + if err != nil { + log.Println(err) + return -1 + } + + return value +} + +// halfString return the halfString of the string. +func halfString(s string) string { + runes := []rune(s) + n := len(runes) / 2 + return string(runes[:n]) + "..." +} diff --git a/giscus.json b/giscus.json new file mode 100644 index 00000000000..82b20494c35 --- /dev/null +++ b/giscus.json @@ -0,0 +1,3 @@ +{ + "origins": ["https://docs.shellhub.io"] +} diff --git a/go.mod b/go.mod index 512e48839dc..98095a38efa 100644 --- a/go.mod +++ b/go.mod @@ -1,86 +1,116 @@ module github.com/shellhub-io/shellhub -go 1.20 +go 1.24.9 require ( - github.com/GehirnInc/crypt v0.0.0-20230320061759-8cc1b52080c5 - github.com/Masterminds/semver v1.5.0 - github.com/creack/pty v1.1.18 - github.com/docker/docker v24.0.7+incompatible - github.com/gliderlabs/ssh v0.3.5 - github.com/go-playground/assert/v2 v2.2.0 + github.com/adhocore/gronx v1.8.1 github.com/go-playground/validator/v10 v10.11.2 github.com/go-redis/cache/v8 v8.4.4 github.com/go-redis/redis/v8 v8.11.5 github.com/go-resty/resty/v2 v2.7.0 - github.com/golang-jwt/jwt/v4 v4.5.0 - github.com/google/uuid v1.2.0 + github.com/golang-jwt/jwt/v4 v4.5.2 + github.com/google/uuid v1.6.0 github.com/gorilla/websocket v1.5.0 + github.com/hashicorp/yamux v0.1.2 github.com/hibiken/asynq v0.24.1 github.com/jarcoal/httpmock v1.3.1 github.com/labstack/echo/v4 v4.10.2 - github.com/mattn/go-shellwords v1.0.12 - github.com/mholt/archiver/v3 v3.5.1 + github.com/labstack/gommon v0.4.0 + github.com/mholt/archiver/v4 v4.0.0-alpha.8 github.com/oschwald/geoip2-golang v1.8.0 - github.com/pkg/errors v0.9.1 - github.com/pkg/sftp v1.13.5 github.com/sethvargo/go-envconfig v0.9.0 - github.com/sirupsen/logrus v1.9.0 - 
github.com/stretchr/testify v1.8.2 - golang.org/x/crypto v0.17.0 - golang.org/x/sys v0.15.0 + github.com/sirupsen/logrus v1.9.3 + github.com/stretchr/testify v1.10.0 + github.com/testcontainers/testcontainers-go/modules/redis v0.32.0 + golang.org/x/crypto v0.45.0 + golang.org/x/sync v0.18.0 ) require ( - github.com/Microsoft/go-winio v0.6.1 // indirect + dario.cat/mergo v1.0.0 // indirect + github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect + github.com/Microsoft/go-winio v0.6.2 // indirect + github.com/Microsoft/hcsshim v0.11.7 // indirect github.com/andybalholm/brotli v1.0.5 // indirect - github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be // indirect + github.com/bodgit/plumbing v1.2.0 // indirect + github.com/bodgit/sevenzip v1.3.0 // indirect + github.com/bodgit/windows v1.0.0 // indirect + github.com/cenkalti/backoff/v4 v4.3.0 // indirect github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/connesc/cipherio v0.2.1 // indirect + github.com/containerd/containerd v1.7.29 // indirect + github.com/containerd/log v0.1.0 // indirect + github.com/containerd/platforms v0.2.1 // indirect + github.com/cpuguy83/dockercfg v0.3.1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect - github.com/docker/distribution v2.8.2+incompatible // indirect - github.com/docker/go-connections v0.4.0 // indirect + github.com/distribution/reference v0.6.0 // indirect + github.com/docker/docker v28.0.0+incompatible // indirect + github.com/docker/go-connections v0.5.0 // indirect github.com/docker/go-units v0.5.0 // indirect github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5 // indirect + github.com/felixge/httpsnoop v1.0.4 // indirect + github.com/go-logr/logr v1.4.3 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-ole/go-ole v1.2.6 // indirect github.com/go-playground/locales v0.14.1 // indirect 
github.com/go-playground/universal-translator v0.18.1 // indirect github.com/gogo/protobuf v1.3.2 // indirect - github.com/golang/protobuf v1.5.2 // indirect + github.com/golang/protobuf v1.5.4 // indirect github.com/golang/snappy v0.0.4 // indirect - github.com/google/go-cmp v0.5.9 // indirect - github.com/klauspost/compress v1.16.0 // indirect + github.com/hashicorp/errwrap v1.1.0 // indirect + github.com/hashicorp/go-multierror v1.1.1 // indirect + github.com/klauspost/compress v1.17.4 // indirect github.com/klauspost/pgzip v1.2.5 // indirect - github.com/kr/fs v0.1.0 // indirect - github.com/labstack/gommon v0.4.0 // indirect github.com/leodido/go-urn v1.2.2 // indirect + github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect + github.com/magiconair/properties v1.8.7 // indirect github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-isatty v0.0.17 // indirect + github.com/moby/docker-image-spec v1.3.1 // indirect + github.com/moby/patternmatcher v0.6.0 // indirect + github.com/moby/sys/sequential v0.5.0 // indirect + github.com/moby/sys/user v0.3.0 // indirect + github.com/moby/sys/userns v0.1.0 // indirect github.com/moby/term v0.5.0 // indirect github.com/morikuni/aec v1.0.0 // indirect - github.com/nwaples/rardecode v1.1.3 // indirect + github.com/nwaples/rardecode/v2 v2.2.0 // indirect github.com/opencontainers/go-digest v1.0.0 // indirect - github.com/opencontainers/image-spec v1.0.2 // indirect + github.com/opencontainers/image-spec v1.1.0 // indirect github.com/oschwald/maxminddb-golang v1.10.0 // indirect github.com/pierrec/lz4/v4 v4.1.17 // indirect + github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect github.com/redis/go-redis/v9 v9.0.3 // indirect github.com/robfig/cron/v3 v3.0.1 // indirect + github.com/shirou/gopsutil/v3 v3.23.12 // indirect + github.com/shoenig/go-m1cpu v0.1.6 // indirect 
github.com/spf13/cast v1.3.1 // indirect - github.com/stretchr/objx v0.5.0 // indirect - github.com/ulikunitz/xz v0.5.11 // indirect + github.com/stretchr/objx v0.5.2 // indirect + github.com/testcontainers/testcontainers-go v0.32.0 // indirect + github.com/therootcompany/xz v1.0.1 // indirect + github.com/tklauser/go-sysconf v0.3.12 // indirect + github.com/tklauser/numcpus v0.6.1 // indirect + github.com/ulikunitz/xz v0.5.14 // indirect github.com/valyala/bytebufferpool v1.0.0 // indirect github.com/valyala/fasttemplate v1.2.2 // indirect github.com/vmihailenco/go-tinylfu v0.2.2 // indirect github.com/vmihailenco/msgpack/v5 v5.3.5 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 // indirect - golang.org/x/mod v0.8.0 // indirect - golang.org/x/net v0.17.0 // indirect - golang.org/x/sync v0.1.0 // indirect - golang.org/x/text v0.14.0 // indirect - golang.org/x/time v0.3.0 // indirect - golang.org/x/tools v0.6.0 // indirect - google.golang.org/protobuf v1.26.0 // indirect + github.com/yusufpapurcu/wmi v1.2.3 // indirect + go.opentelemetry.io/auto/sdk v1.1.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.51.0 // indirect + go.opentelemetry.io/otel v1.37.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.26.0 // indirect + go.opentelemetry.io/otel/metric v1.37.0 // indirect + go.opentelemetry.io/otel/sdk v1.26.0 // indirect + go.opentelemetry.io/otel/trace v1.37.0 // indirect + go4.org v0.0.0-20200411211856-f5505b9728dd // indirect + golang.org/x/net v0.47.0 // indirect + golang.org/x/sys v0.38.0 // indirect + golang.org/x/text v0.31.0 // indirect + golang.org/x/time v0.12.0 // indirect + google.golang.org/protobuf v1.35.2 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect - gotest.tools/v3 v3.5.0 // indirect ) diff --git a/go.sum b/go.sum new file mode 100644 index 00000000000..241bb5a6492 --- /dev/null +++ b/go.sum @@ -0,0 +1,603 @@ 
+cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= +dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24 h1:bvDV9vkmnHYOMsOr4WLk+Vo07yKIzd94sVoIqshQ4bU= +github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8= +github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 
h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= +github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= +github.com/Microsoft/hcsshim v0.11.7 h1:vl/nj3Bar/CvJSYo7gIQPyRWc9f3c6IeSNavBTSZNZQ= +github.com/Microsoft/hcsshim v0.11.7/go.mod h1:MV8xMfmECjl5HdO7U/3/hFVnkmSBjAjmA09d4bExKcU= +github.com/adhocore/gronx v1.8.1 h1:F2mLTG5sB11z7vplwD4iydz3YCEjstSfYmCrdSm3t6A= +github.com/adhocore/gronx v1.8.1/go.mod h1:7oUY1WAU8rEJWmAxXR2DN0JaO4gi9khSgKjiRypqteg= +github.com/andybalholm/brotli v1.0.5 h1:8uQZIdzKmjc/iuPu7O2ioW48L81FgatrcpfFmiq/cCs= +github.com/andybalholm/brotli v1.0.5/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= +github.com/bodgit/plumbing v1.2.0 h1:gg4haxoKphLjml+tgnecR4yLBV5zo4HAZGCtAh3xCzM= +github.com/bodgit/plumbing v1.2.0/go.mod h1:b9TeRi7Hvc6Y05rjm8VML3+47n4XTZPtQ/5ghqic2n8= +github.com/bodgit/sevenzip v1.3.0 h1:1ljgELgtHqvgIp8W8kgeEGHIWP4ch3xGI8uOBZgLVKY= +github.com/bodgit/sevenzip v1.3.0/go.mod h1:omwNcgZTEooWM8gA/IJ2Nk/+ZQ94+GsytRzOJJ8FBlM= +github.com/bodgit/windows v1.0.0 h1:rLQ/XjsleZvx4fR1tB/UxQrK+SJ2OFHzfPjLWWOhDIA= +github.com/bodgit/windows v1.0.0/go.mod h1:a6JLwrB4KrTR5hBpp8FI9/9W9jJfeQ2h4XDXU74ZCdM= +github.com/bsm/ginkgo/v2 v2.7.0 h1:ItPMPH90RbmZJt5GtkcNvIRuGEdwlBItdNVoyzaNQao= +github.com/bsm/ginkgo/v2 v2.7.0/go.mod h1:AiKlXPm7ItEHNc/2+OkrNG4E0ITzojb9/xWzvQ9XZ9w= +github.com/bsm/gomega v1.26.0 h1:LhQm+AFcgV2M0WyKroMASzAzCAJVpAxQXv4SaI9a69Y= +github.com/bsm/gomega v1.26.0/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0= +github.com/cenkalti/backoff/v4 v4.3.0 
h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= +github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= +github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/connesc/cipherio v0.2.1 h1:FGtpTPMbKNNWByNrr9aEBtaJtXjqOzkIXNYJp6OEycw= +github.com/connesc/cipherio v0.2.1/go.mod h1:ukY0MWJDFnJEbXMQtOcn2VmTpRfzcTz4OoVrWGGJZcA= +github.com/containerd/containerd v1.7.29 h1:90fWABQsaN9mJhGkoVnuzEY+o1XDPbg9BTC9QTAHnuE= +github.com/containerd/containerd v1.7.29/go.mod h1:azUkWcOvHrWvaiUjSQH0fjzuHIwSPg1WL5PshGP4Szs= +github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= +github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= +github.com/containerd/platforms v0.2.1 h1:zvwtM3rz2YHPQsF2CHYM8+KtB5dvhISiXh5ZpSBQv6A= +github.com/containerd/platforms v0.2.1/go.mod h1:XHCb+2/hzowdiut9rkudds9bE5yJ7npe7dG/wG+uFPw= +github.com/cpuguy83/dockercfg v0.3.1 h1:/FpZ+JaygUR/lZP2NlFI2DVfrOEMAIKP5wWEJdoYe9E= +github.com/cpuguy83/dockercfg v0.3.1/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc= +github.com/creack/pty v1.1.9/go.mod 
h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY= +github.com/creack/pty v1.1.18/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= +github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= +github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= +github.com/docker/docker v28.0.0+incompatible h1:Olh0KS820sJ7nPsBKChVhk5pzqcwDR15fumfAd/p9hM= +github.com/docker/docker v28.0.0+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c= +github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc= +github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= +github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5 h1:iFaUwBSo5Svw6L7HYpRu/0lE3e0BaElwnNO1qkNQxBY= +github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5/go.mod h1:qssHWj60/X5sZFNxpG4HBPDHVqxNm4DfnCKgrbZOT+s= +github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod 
h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= +github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= +github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= +github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= +github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= +github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= +github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= +github.com/go-playground/universal-translator v0.18.1/go.mod 
h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= +github.com/go-playground/validator/v10 v10.11.2 h1:q3SHpufmypg+erIExEKUmsgmhDTyhcJ38oeKGACXohU= +github.com/go-playground/validator/v10 v10.11.2/go.mod h1:NieE624vt4SCTJtD87arVLvdmjPAeV8BQlHtMnw9D7s= +github.com/go-redis/cache/v8 v8.4.4 h1:Rm0wZ55X22BA2JMqVtRQNHYyzDd0I5f+Ec/C9Xx3mXY= +github.com/go-redis/cache/v8 v8.4.4/go.mod h1:JM6CkupsPvAu/LYEVGQy6UB4WDAzQSXkR0lUCbeIcKc= +github.com/go-redis/redis/v8 v8.11.3/go.mod h1:xNJ9xDG09FsIPwh3bWdk+0oDWHbtF9rPN0F/oD9XeKc= +github.com/go-redis/redis/v8 v8.11.5 h1:AcZZR7igkdvfVmQTPnu9WE37LRrO/YrBH5zWyjDC0oI= +github.com/go-redis/redis/v8 v8.11.5/go.mod h1:gREzHqY1hg6oD9ngVRbLStwAWKhA0FEgq8Jd4h5lpwo= +github.com/go-resty/resty/v2 v2.7.0 h1:me+K9p3uhSmXtrBZ4k9jcEAfJmuC8IivWHwaLZwPrFY= +github.com/go-resty/resty/v2 v2.7.0/go.mod h1:9PWDzw47qPphMRFfhsyk0NnSgvluHcljSMVIq3w7q0I= +github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang-jwt/jwt/v4 v4.5.2 h1:YtQM7lnr8iZ+j5q71MGKkNw9Mn7AjHM68uc9g5fXeUI= +github.com/golang-jwt/jwt/v4 v4.5.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= 
+github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= +github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= +github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod 
h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= 
+github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= +github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.1 h1:/c3QmbOGMGTOumP2iT/rCwB7b0QDGLKzqOmktBjT+Is= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.1/go.mod h1:5SN9VR2LTsRFsrEC6FHgRbTWrTHu6tqPeKxEQv15giM= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= +github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/yamux v0.1.2 h1:XtB8kyFOyHXYVFnwT5C3+Bdo8gArse7j2AQ0DA0Uey8= +github.com/hashicorp/yamux v0.1.2/go.mod h1:C+zze2n6e/7wshOZep2A70/aQU6QBRWJO/G6FT1wIns= +github.com/hibiken/asynq v0.24.1 h1:+5iIEAyA9K/lcSPvx3qoPtsKJeKI5u9aOIvUmSsazEw= +github.com/hibiken/asynq v0.24.1/go.mod h1:u5qVeSbrnfT+vtG5Mq8ZPzQu/BmCKMHvTGb91uy9Tts= +github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/jarcoal/httpmock v1.3.1 h1:iUx3whfZWVf3jT01hQTO/Eo5sAYtB2/rqaUuOtpInww= +github.com/jarcoal/httpmock v1.3.1/go.mod h1:3yb8rc4BI7TCBhFY8ng0gjuLKJNquuDNiPaZjnENuYg= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= 
+github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= +github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= +github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4= +github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= +github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= +github.com/klauspost/pgzip v1.2.5 h1:qnWYvvKqedOF2ulHpMG72XQol4ILEJ8k2wwRl/Km8oE= +github.com/klauspost/pgzip v1.2.5/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/labstack/echo/v4 v4.10.2 h1:n1jAhnq/elIFTHr1EYpiYtyKgx4RW9ccVgkqByZaN2M= +github.com/labstack/echo/v4 v4.10.2/go.mod h1:OEyqf2//K1DFdE57vw2DRgWY0M7s65IVQO2FzvI4J5k= +github.com/labstack/gommon v0.4.0 h1:y7cvthEAEbU0yHOf4axH8ZG2NH8knB9iNSoTO8dyIk8= +github.com/labstack/gommon v0.4.0/go.mod h1:uW6kP17uPlLJsD3ijUYn3/M5bAxtlZhMI6m3MFxTMTM= +github.com/leodido/go-urn v1.2.2 h1:7z68G0FCGvDk646jz1AelTYNYWrTNm0bEcFAo147wt4= +github.com/leodido/go-urn v1.2.2/go.mod h1:kUaIbLZWttglzwNuG0pgsh5vuV6u2YcGBYz1hIPjtOQ= 
+github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= +github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= +github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= +github.com/mattn/go-colorable v0.1.11/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.17 h1:BTarxUcIeDqL27Mc+vyvdWYSL28zpIhv3RoTdsLMPng= +github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/maxatome/go-testdeep v1.12.0 h1:Ql7Go8Tg0C1D/uMMX59LAoYK7LffeJQ6X2T04nTH68g= +github.com/maxatome/go-testdeep v1.12.0/go.mod h1:lPZc/HAcJMP92l7yI6TRz1aZN5URwUBUAfUNvrclaNM= +github.com/mholt/archiver/v4 v4.0.0-alpha.8 h1:tRGQuDVPh66WCOelqe6LIGh0gwmfwxUrSSDunscGsRM= +github.com/mholt/archiver/v4 v4.0.0-alpha.8/go.mod h1:5f7FUYGXdJWUjESffJaYR4R60VhnHxb2X3T1teMyv5A= +github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= +github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= +github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk= +github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc= +github.com/moby/sys/sequential v0.5.0 h1:OPvI35Lzn9K04PBbCLW0g4LcFAJgHsvXsRyewg5lXtc= +github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWKGRYaaV5ZZlo= 
+github.com/moby/sys/user v0.3.0 h1:9ni5DlcW5an3SvRSx4MouotOygvzaXbaSrc/wGDFWPo= +github.com/moby/sys/user v0.3.0/go.mod h1:bG+tYYYJgaMtRKgEmuueC0hJEAZWwtIbZTB+85uoHjs= +github.com/moby/sys/userns v0.1.0 h1:tVLXkFOxVu9A64/yh59slHVv9ahO9UIev4JZusOLG/g= +github.com/moby/sys/userns v0.1.0/go.mod h1:IHUYgu/kao6N8YZlp9Cf444ySSvCmDlmzUcYfDHOl28= +github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= +github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= +github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= +github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/nwaples/rardecode/v2 v2.2.0 h1:4ufPGHiNe1rYJxYfehALLjup4Ls3ck42CWwjKiOqu0A= +github.com/nwaples/rardecode/v2 v2.2.0/go.mod h1:7uz379lSxPe6j9nvzxUZ+n7mnJNgjsRNb6IbvGVHRmw= +github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= +github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= +github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= +github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= +github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= +github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= +github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= +github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= +github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= +github.com/onsi/gomega v1.15.0/go.mod h1:cIuvLEne0aoVhAgh/O6ac0Op8WWw9H6eYCriF+tEHG0= +github.com/onsi/gomega v1.18.1 h1:M1GfJqGRrBrrGGsbxzV5dqM2U2ApXefZCQpkukxYRLE= +github.com/onsi/gomega v1.18.1/go.mod h1:0q+aL8jAiMXy9hbwj2mr5GziHiwhAIQpFmmtT5hitRs= +github.com/opencontainers/go-digest v1.0.0 
h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= +github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug= +github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM= +github.com/oschwald/geoip2-golang v1.8.0 h1:KfjYB8ojCEn/QLqsDU0AzrJ3R5Qa9vFlx3z6SLNcKTs= +github.com/oschwald/geoip2-golang v1.8.0/go.mod h1:R7bRvYjOeaoenAp9sKRS8GX5bJWcZ0laWO5+DauEktw= +github.com/oschwald/maxminddb-golang v1.10.0 h1:Xp1u0ZhqkSuopaKmk1WwHtjF0H9Hd9181uj2MQ5Vndg= +github.com/oschwald/maxminddb-golang v1.10.0/go.mod h1:Y2ELenReaLAZ0b400URyGwvYxHV1dLIxBuyOsyYjHK0= +github.com/pierrec/lz4/v4 v4.1.17 h1:kV4Ip+/hUBC+8T6+2EgburRtkE9ef4nbY3f4dFhGjMc= +github.com/pierrec/lz4/v4 v4.1.17/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/redis/go-redis/v9 v9.0.3 h1:+7mmR26M0IvyLxGZUHxu4GiBkJkVDid0Un+j4ScYu4k= +github.com/redis/go-redis/v9 v9.0.3/go.mod h1:WqMKv5vnQbRuZstUwxQI195wHy+t4PuXDOjzMvcuQHk= +github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs= +github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro= +github.com/rogpeppe/go-internal v1.3.0/go.mod 
h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= +github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= +github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= +github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd/go.mod h1:hPqNNc0+uJM6H+SuU8sEs5K5IQeKccPqeSjfgcKGgPk= +github.com/rwtodd/Go.Sed v0.0.0-20210816025313-55464686f9ef/go.mod h1:8AEUvGVi2uQ5b24BIhcr0GCcpd/RNAFWaN2CJFrWIIQ= +github.com/sethvargo/go-envconfig v0.9.0 h1:Q6FQ6hVEeTECULvkJZakq3dZMeBQ3JUpcKMfPQbKMDE= +github.com/sethvargo/go-envconfig v0.9.0/go.mod h1:Iz1Gy1Sf3T64TQlJSvee81qDhf7YIlt8GMUX6yyNFs0= +github.com/shirou/gopsutil/v3 v3.23.12 h1:z90NtUkp3bMtmICZKpC4+WaknU1eXtp5vtbQ11DgpE4= +github.com/shirou/gopsutil/v3 v3.23.12/go.mod h1:1FrWgea594Jp7qmjHUUPlJDTPgcsb9mGnXDxavtikzM= +github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM= +github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ= +github.com/shoenig/test v0.6.4 h1:kVTaSd7WLz5WZ2IaoM0RSzRsUD+m8wRR+5qvntpn4LU= +github.com/shoenig/test v0.6.4/go.mod h1:byHiCGXqrVaflBLAMq/srcZIHynQPQgeyvkvXnjqq0k= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/spf13/cast v1.3.1 h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng= +github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod 
h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/testcontainers/testcontainers-go v0.32.0 h1:ug1aK08L3gCHdhknlTTwWjPHPS+/alvLJU/DRxTD/ME= +github.com/testcontainers/testcontainers-go v0.32.0/go.mod h1:CRHrzHLQhlXUsa5gXjTOfqIEJcrK5+xMDmBr/WMI88E= +github.com/testcontainers/testcontainers-go/modules/redis v0.32.0 h1:HW5Qo9qfLi5iwfS7cbXwG6qe8ybXGePcgGPEmVlVDlo= +github.com/testcontainers/testcontainers-go/modules/redis v0.32.0/go.mod h1:5kltdxVKZG0aP1iegeqKz4K8HHyP0wbkW5o84qLyMjY= +github.com/therootcompany/xz v1.0.1 h1:CmOtsn1CbtmyYiusbfmhmkpAAETj0wBIH6kCYaX+xzw= +github.com/therootcompany/xz v1.0.1/go.mod h1:3K3UH1yCKgBneZYhuQUvJ9HPD19UEXEI0BWbMn8qNMY= +github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU= +github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI= +github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk= +github.com/tklauser/numcpus v0.6.1/go.mod 
h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY= +github.com/ulikunitz/xz v0.5.8/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= +github.com/ulikunitz/xz v0.5.14 h1:uv/0Bq533iFdnMHZdRBTOlaNMdb1+ZxXIlHDZHIHcvg= +github.com/ulikunitz/xz v0.5.14/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= +github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= +github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= +github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= +github.com/valyala/fasttemplate v1.2.2 h1:lxLXG0uE3Qnshl9QyaK6XJxMXlQZELvChBOCmQD0Loo= +github.com/valyala/fasttemplate v1.2.2/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= +github.com/vmihailenco/go-tinylfu v0.2.2 h1:H1eiG6HM36iniK6+21n9LLpzx1G9R3DJa2UjUjbynsI= +github.com/vmihailenco/go-tinylfu v0.2.2/go.mod h1:CutYi2Q9puTxfcolkliPq4npPuofg9N9t8JVrjzwa3Q= +github.com/vmihailenco/msgpack/v5 v5.3.4/go.mod h1:7xyJ9e+0+9SaZT0Wt1RGleJXzli6Q/V5KbhBonMG9jc= +github.com/vmihailenco/msgpack/v5 v5.3.5 h1:5gO0H1iULLWGhs2H5tbAHIZTV8/cYafcFOr9znI5mJU= +github.com/vmihailenco/msgpack/v5 v5.3.5/go.mod h1:7xyJ9e+0+9SaZT0Wt1RGleJXzli6Q/V5KbhBonMG9jc= +github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= +github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw= +github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod 
h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= +go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.51.0 h1:Xs2Ncz0gNihqu9iosIZ5SkBbWo5T8JhhLJFMQL1qmLI= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.51.0/go.mod h1:vy+2G/6NvVMpwGX/NyLqcC41fxepnuKHk16E6IZUcJc= +go.opentelemetry.io/otel v1.37.0 h1:9zhNfelUvx0KBfu/gb+ZgeAfAgtWrfHJZcAqFC228wQ= +go.opentelemetry.io/otel v1.37.0/go.mod h1:ehE/umFRLnuLa/vSccNq9oS1ErUlkkK71gMcN34UG8I= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.26.0 h1:1u/AyyOqAWzy+SkPxDpahCNZParHV8Vid1RnI2clyDE= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.26.0/go.mod h1:z46paqbJ9l7c9fIPCXTqTGwhQZ5XoTIsfeFYWboizjs= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.26.0 h1:1wp/gyxsuYtuE/JFxsQRtcCDtMrO2qMvlfXALU5wkzI= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.26.0/go.mod h1:gbTHmghkGgqxMomVQQMur1Nba4M0MQ8AYThXDUjsJ38= +go.opentelemetry.io/otel/metric v1.37.0 h1:mvwbQS5m0tbmqML4NqK+e3aDiO02vsf/WgbsdpcPoZE= +go.opentelemetry.io/otel/metric v1.37.0/go.mod h1:04wGrZurHYKOc+RKeye86GwKiTb9FKm1WHtO+4EVr2E= +go.opentelemetry.io/otel/sdk v1.26.0 h1:Y7bumHf5tAiDlRYFmGqetNcLaVUZmh4iYfmGxtmz7F8= +go.opentelemetry.io/otel/sdk v1.26.0/go.mod h1:0p8MXpqLeJ0pzcszQQN4F0S5FVjBLgypeGSngLsmirs= +go.opentelemetry.io/otel/trace v1.37.0 h1:HLdcFNbRQBE2imdSEgm/kwqmQj1Or1l/7bW6mxVK7z4= +go.opentelemetry.io/otel/trace v1.37.0/go.mod h1:TlgrlQ+PtQO5XFerSPUYG0JSgGyryXewPGyayAWSBS0= +go.opentelemetry.io/proto/otlp v1.2.0 h1:pVeZGk7nXDC9O2hncA6nHldxEjm6LByfA2aN8IOkz94= +go.opentelemetry.io/proto/otlp v1.2.0/go.mod 
h1:gGpR8txAl5M03pDhMC79G6SdqNV26naRm/KDsgaHD8A= +go.uber.org/goleak v1.1.12 h1:gZAh5/EyT/HQwlpkCy6wTpqfH9H8Lz8zbm3dZh+OyzA= +go.uber.org/goleak v1.1.12/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= +go4.org v0.0.0-20200411211856-f5505b9728dd h1:BNJlw5kRTzdmyfh5U8F93HA2OwkP7ZGwA51eJ/0wKOU= +go4.org v0.0.0-20200411211856-f5505b9728dd/go.mod h1:CIiUVy99QCPfoE13bO4EZaz5GZMZXMSBGhxRdsvzbkg= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q= +golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/image 
v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net 
v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod 
h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= +golang.org/x/net v0.0.0-20211029224645-99673261e6eb/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY= +golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I= +golang.org/x/sync v0.18.0/go.mod 
h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
+golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211103235746-7861aae1554b/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc= +golang.org/x/sys v0.38.0/go.mod 
h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU= +golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM= +golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE= +golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools 
v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools 
v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= 
+google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20231211222908-989df2bf70f3 h1:1hfbdAfFbkmpg41000wDVqr7jUpK/Yo+LPnIxxGzmkg= +google.golang.org/genproto/googleapis/api v0.0.0-20240227224415-6ceb2ff114de h1:jFNzHPIeuzhdRwVhbZdiym9q0ory/xY3sA+v2wPg8I0= +google.golang.org/genproto/googleapis/api v0.0.0-20240227224415-6ceb2ff114de/go.mod h1:5iCWqnniDlqZHrd3neWVTOwvh/v6s3232omMecelax8= 
+google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda h1:LI5DOvAxUPMv/50agcLLoo+AdWc1irS9Rzz4vPuD1V4= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda/go.mod h1:WtryC6hu0hhx87FDGxWCDptyssuo68sk10vYjF+T9fY= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.63.2 h1:MUeiw1B2maTVZthpU5xvASfTh3LDbxHd6IJ6QQVU+xM= +google.golang.org/grpc v1.63.2/go.mod h1:WAX/8DgncnokcFUldAxq7GeB5DXHDbMF+lLvDomNkRA= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.35.2 h1:8Ar7bF+apOIoThw1EdZl0p1oWvMqTHmpA2fRTyZO8io= +google.golang.org/protobuf 
v1.35.2/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gotest.tools/v3 v3.5.1 h1:EENdUnS3pdur5nybKYIh2Vfgc8IUNBjxDPSjtiJcOzU= +gotest.tools/v3 v3.5.1/go.mod h1:isy3WKz7GK6uNw/sbHzfKBLvlvXwUyV06n6brMxxopU= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= 
+honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/install.sh b/install.sh old mode 100644 new mode 100755 index 420eb85bac7..56e8d1ba6a3 --- a/install.sh +++ b/install.sh @@ -1,167 +1,501 @@ #!/bin/sh +# Overridden variables from Go template: {{.Overrides}} +podman_install() { + [ -n "${KEEPALIVE_INTERVAL}" ] && ARGS="$ARGS -e SHELLHUB_KEEPALIVE_INTERVAL=$KEEPALIVE_INTERVAL" + [ -n "${PREFERRED_HOSTNAME}" ] && ARGS="$ARGS -e SHELLHUB_PREFERRED_HOSTNAME=$PREFERRED_HOSTNAME" + [ -n "${PREFERRED_IDENTITY}" ] && ARGS="$ARGS -e SHELLHUB_PREFERRED_IDENTITY=$PREFERRED_IDENTITY" + + echo "📥 Downloading ShellHub container image..." + + { + $SUDO podman pull -q docker.io/shellhubio/agent:$AGENT_VERSION + } || { + echo "❌ Failed to download shellhub container image." + exit 1 + } + + MODE="" + DEFAULT_CONTAINER_NAME="shellhub" + + case "$1" in + "") ;; + "agent") + shift 1 + ;; + "connector") + MODE="connector" + DEFAULT_CONTAINER_NAME="shellhub-connector" + ARGS="$ARGS -e SHELLHUB_PRIVATE_KEYS=${PRIVATE_KEYS:-/host/etc/shellhub/connector/keys}" + ARGS="$ARGS -e SHELLHUB_CONNECTOR_LABEL=${CONNECTOR_LABEL}" + + echo "🚀 Starting ShellHub container in Docker Connector mode..." 
+ shift 1 + ;; + *) + echo "❌ Invalid mode: $2" + exit 1 + ;; + esac + + if [ -z "$MODE" ]; then + ARGS="$ARGS -e SHELLHUB_PRIVATE_KEY=${PRIVATE_KEY:-/host/etc/shellhub.key}" + + echo "🚀 Starting ShellHub container in Agent mode..." + fi + + CONTAINER_NAME="${CONTAINER_NAME:-$DEFAULT_CONTAINER_NAME}" + + $SUDO podman run -d \ + --name=$CONTAINER_NAME \ + --replace \ + --restart=on-failure \ + --privileged \ + --pid=host \ + --security-opt label=disable \ + --network host \ + -v /:/host \ + -v /dev:/dev \ + -v /var/run/podman/podman.sock:/var/run/docker.sock \ + -v /proc:/proc \ + -v /var/run:/var/run \ + -v /var/log:/var/log \ + -v /tmp:/tmp \ + -e SHELLHUB_SERVER_ADDRESS=$SERVER_ADDRESS \ + -e SHELLHUB_TENANT_ID=$TENANT_ID \ + $ARGS \ + docker.io/shellhubio/agent:$AGENT_VERSION \ + $MODE +} + docker_install() { - KEEPALIVE_INTERVAL_ARG="-e SHELLHUB_KEEPALIVE_INTERVAL=$KEEPALIVE_INTERVAL" - PREFERRED_HOSTNAME_ARG="-e SHELLHUB_PREFERRED_HOSTNAME=$PREFERRED_HOSTNAME" - PREFERRED_IDENTITY_ARG="-e SHELLHUB_PREFERRED_IDENTITY=$PREFERRED_IDENTITY" - - docker run -d \ - --name=$CONTAINER_NAME \ - --restart=on-failure \ - --privileged \ - --net=host \ - --pid=host \ - -v /:/host \ - -v /dev:/dev \ - -v /var/run/docker.sock:/var/run/docker.sock \ - -v /etc/passwd:/etc/passwd \ - -v /etc/group:/etc/group \ - -v /etc/resolv.conf:/etc/resolv.conf \ - -v /var/run:/var/run \ - -v /var/log:/var/log \ - -e SHELLHUB_SERVER_ADDRESS=$SERVER_ADDRESS \ - -e SHELLHUB_PRIVATE_KEY=/host/etc/shellhub.key \ - -e SHELLHUB_TENANT_ID=$TENANT_ID \ - $KEEPALIVE_INTERVAL_ARG \ - $PREFERRED_HOSTNAME_ARG \ - $PREFERRED_IDENTITY_ARG \ - shellhubio/agent:$AGENT_VERSION + [ -n "${KEEPALIVE_INTERVAL}" ] && ARGS="$ARGS -e SHELLHUB_KEEPALIVE_INTERVAL=$KEEPALIVE_INTERVAL" + [ -n "${PREFERRED_HOSTNAME}" ] && ARGS="$ARGS -e SHELLHUB_PREFERRED_HOSTNAME=$PREFERRED_HOSTNAME" + [ -n "${PREFERRED_IDENTITY}" ] && ARGS="$ARGS -e SHELLHUB_PREFERRED_IDENTITY=$PREFERRED_IDENTITY" + + echo "📥 Downloading ShellHub 
container image..." + + { + docker pull -q shellhubio/agent:$AGENT_VERSION + } || { + echo "❌ Failed to download shellhub container image." + exit 1 + } + + MODE="" + DEFAULT_CONTAINER_NAME="shellhub" + + case "$1" in + "") ;; + "agent") + shift 1 + ;; + "connector") + MODE="connector" + DEFAULT_CONTAINER_NAME="shellhub-connector" + ARGS="$ARGS -e SHELLHUB_PRIVATE_KEYS=${PRIVATE_KEYS:-/host/etc/shellhub/connector/keys}" + ARGS="$ARGS -e SHELLHUB_CONNECTOR_LABEL=${CONNECTOR_LABEL}" + + echo "🚀 Starting ShellHub container in Docker Connector mode..." + shift 1 + ;; + *) + echo "❌ Invalid mode: $2" + exit 1 + ;; + esac + + if [ -z "$MODE" ]; then + ARGS="$ARGS -e SHELLHUB_PRIVATE_KEY=${PRIVATE_KEY:-/host/etc/shellhub.key}" + + echo "🚀 Starting ShellHub container in Agent mode..." + fi + + CONTAINER_NAME="${CONTAINER_NAME:-$DEFAULT_CONTAINER_NAME}" + + $SUDO docker run -d \ + --name=$CONTAINER_NAME \ + --restart=on-failure \ + --privileged \ + --net=host \ + --pid=host \ + -v /:/host \ + -v /dev:/dev \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /etc/resolv.conf:/etc/resolv.conf \ + -v /var/run:/var/run \ + -v /var/log:/var/log \ + -v /tmp:/tmp \ + -e SHELLHUB_SERVER_ADDRESS=$SERVER_ADDRESS \ + -e SHELLHUB_TENANT_ID=$TENANT_ID \ + $ARGS \ + shellhubio/agent:$AGENT_VERSION \ + $MODE } -bundle_install() { - INSTALL_DIR="${INSTALL_DIR:-/opt/shellhub}" +snap_install() { + if ! type snap >/dev/null 2>&1; then + echo "❌ Snap is not installed or not supported on this system." + exit 1 + fi + + echo "📥 Downloading ShellHub snap package..." - if [ "$(id -u)" -ne 0 ]; then - printf "NOTE: This install method requires root privileges.\n" - SUDO="sudo" + { + sudo snap install --classic --channel=latest/stable shellhub + } || { + echo "❌ Failed to download and install ShellHub snap package." + exit 1 + } + + echo "🚀 Starting ShellHub snap service..." 
+ + { + if [ -n "${PREFERRED_HOSTNAME}" ]; then + sudo snap set shellhub preferred-hostname="${PREFERRED_HOSTNAME}" + fi + + if [ -n "${PREFERRED_IDENTITY}" ]; then + sudo snap set shellhub preferred-identity="${PREFERRED_IDENTITY}" fi - if ! systemctl show-environment > /dev/null 2>&1 ; then - printf "ERROR: This is not a systemd-based operation system. Unable to proceed with the requested action.\n" - exit 1 + if [ -n "${KEEPALIVE_INTERVAL}" ]; then + sudo snap set shellhub keepalive-interval="${KEEPALIVE_INTERVAL}" fi + sudo snap set shellhub server-address="$SERVER_ADDRESS" + sudo snap set shellhub tenant-id="$TENANT_ID" + sudo snap set shellhub private-key="${PRIVATE_KEY:-/etc/shellhub.key}" - echo "Downloading runc static binary..." - { - download "https://github.com/opencontainers/runc/releases/download/${RUNC_VERSION}/runc.${RUNC_ARCH}" $TMP_DIR/runc && chmod 755 $TMP_DIR/runc - } || { rm -rf $TMP_DIR && echo "Failed to download runc binary." && exit 1; } + sudo snap start shellhub + } || { + echo "❌ Failed to start ShellHub snap service." + exit 1 + } +} - echo "Downloading OCI runtime spec file..." - { - download https://raw.githubusercontent.com/shellhub-io/shellhub/${AGENT_VERSION}/agent/packaging/config.json $TMP_DIR/config.json - } || { rm -rf $TMP_DIR && echo "Failed to download OCI runtime spec." && exit 1; } +standalone_install() { + INSTALL_DIR="${INSTALL_DIR:-/opt/shellhub}" - echo "Downloading systemd service file..." - { - download https://raw.githubusercontent.com/shellhub-io/shellhub/${AGENT_VERSION}/agent/packaging/shellhub-agent.service $TMP_DIR/shellhub-agent.service - } || { rm -rf $TMP_DIR && echo "Failed to download systemd service file." && exit 1; } - echo "Downloading rootfs tarball..." - { - download https://github.com/shellhub-io/shellhub/releases/download/$AGENT_VERSION/rootfs-$AGENT_ARCH.tar.gz $TMP_DIR/rootfs.tar.gz - } || { rm -rf $TMP_DIR && echo "Failed to download rootfs." 
&& exit 1; } + if [ "$(id -u)" -ne 0 ]; then + printf "⚠️ NOTE: This install method requires root privileges.\n" + SUDO="sudo" + fi - echo "Extracting rootfs..." - { - mkdir -p $TMP_DIR/rootfs && tar -C $TMP_DIR/rootfs -xzf $TMP_DIR/rootfs.tar.gz && rm -f $TMP_DIR/rootfs.tar.gz - } || { rm -rf $TMP_DIR && echo "Failed to extract rootfs." && exit 1; } + if ! systemctl show-environment >/dev/null 2>&1; then + printf "❌ ERROR: This is not a systemd-based operation system. Unable to proceed with the requested action.\n" + exit 1 + fi - rm -f $TMP_DIR/rootfs/.dockerenv + echo "📥 Downloading required files..." - sed -i "s,__SERVER_ADDRESS__,$SERVER_ADDRESS,g" $TMP_DIR/config.json - sed -i "s,__TENANT_ID__,$TENANT_ID,g" $TMP_DIR/config.json - sed -i "s,__ROOT_PATH__,$INSTALL_DIR/rootfs,g" $TMP_DIR/config.json - sed -i "s,__INSTALL_DIR__,$INSTALL_DIR,g" $TMP_DIR/shellhub-agent.service + { + download "https://github.com/opencontainers/runc/releases/download/${RUNC_VERSION}/runc.${RUNC_ARCH}" $TMP_DIR/runc && chmod 755 $TMP_DIR/runc + } || { rm -rf $TMP_DIR && echo "❌ Failed to download runc binary." && exit 1; } - echo "Creating systemd service and starting it..." + { + download https://raw.githubusercontent.com/shellhub-io/shellhub/${AGENT_VERSION}/agent/packaging/config.json $TMP_DIR/config.json + } || { rm -rf $TMP_DIR && echo "❌ Failed to download OCI runtime spec." && exit 1; } - $SUDO cp $TMP_DIR/shellhub-agent.service /etc/systemd/system/shellhub-agent.service - $SUDO systemctl enable --now shellhub-agent || { rm -rf $TMP_DIR && echo "Failed to enable systemd service."; exit 1; } + { + download https://raw.githubusercontent.com/shellhub-io/shellhub/${AGENT_VERSION}/agent/packaging/shellhub-agent.service $TMP_DIR/shellhub-agent.service + } || { rm -rf $TMP_DIR && echo "❌ Failed to download systemd service file." 
&& exit 1; } - $SUDO rm -rf $INSTALL_DIR - $SUDO mv $TMP_DIR $INSTALL_DIR + { + download https://github.com/shellhub-io/shellhub/releases/download/$AGENT_VERSION/rootfs-$AGENT_ARCH.tar.gz $TMP_DIR/rootfs.tar.gz + } || { rm -rf $TMP_DIR && echo "❌ Failed to download rootfs." && exit 1; } + + echo "📂 Extracting files..." + + { + mkdir -p $TMP_DIR/rootfs && tar -C $TMP_DIR/rootfs -xzf $TMP_DIR/rootfs.tar.gz && rm -f $TMP_DIR/rootfs.tar.gz + } || { rm -rf $TMP_DIR && echo "❌ Failed to extract rootfs." && exit 1; } + + rm -f $TMP_DIR/rootfs/.dockerenv + + sed -i "s,__SERVER_ADDRESS__,$SERVER_ADDRESS,g" $TMP_DIR/config.json + sed -i "s,__TENANT_ID__,$TENANT_ID,g" $TMP_DIR/config.json + sed -i "s,__ROOT_PATH__,$INSTALL_DIR/rootfs,g" $TMP_DIR/config.json + sed -i "s,__INSTALL_DIR__,$INSTALL_DIR,g" $TMP_DIR/shellhub-agent.service + + # Handle optional environment variables + if [ -n "${PREFERRED_HOSTNAME}" ]; then + sed -i "s,__PREFERRED_HOSTNAME__,SHELLHUB_PREFERRED_HOSTNAME=$PREFERRED_HOSTNAME,g" $TMP_DIR/config.json + else + sed -i '/"__PREFERRED_HOSTNAME__"/d' $TMP_DIR/config.json + fi + + if [ -n "${PREFERRED_IDENTITY}" ]; then + sed -i "s,__PREFERRED_IDENTITY__,SHELLHUB_PREFERRED_IDENTITY=$PREFERRED_IDENTITY,g" $TMP_DIR/config.json + else + sed -i '/"__PREFERRED_IDENTITY__"/d' $TMP_DIR/config.json + fi + + if [ -n "${KEEPALIVE_INTERVAL}" ]; then + sed -i "s,__KEEPALIVE_INTERVAL__,SHELLHUB_KEEPALIVE_INTERVAL=$KEEPALIVE_INTERVAL,g" $TMP_DIR/config.json + else + sed -i '/"__KEEPALIVE_INTERVAL__"/d' $TMP_DIR/config.json + fi + + echo "🚀 Starting ShellHub system service..." + + $SUDO cp $TMP_DIR/shellhub-agent.service /etc/systemd/system/shellhub-agent.service + + # NOTE: As we need to check if the service is running to indicate it was installed correctly, we need to copy the + # values to install directory before enable it, to a correctly check the status. 
+ $SUDO rm -rf $INSTALL_DIR + $SUDO mv $TMP_DIR $INSTALL_DIR + + uninstall() { + echo "Please check the logs with the command:" + echo "journalctl -f -u shellhub-agent" + echo "" + echo "❗ Uninstalling ShellHub agent..." $SUDO rm -rf $TMP_DIR + $SUDO rm -rf $INSTALL_DIR + $SUDO rm /etc/systemd/system/shellhub-agent.service + } + + $SUDO systemctl enable --now shellhub-agent || { + uninstall && echo "❌ Failed to enable systemd service." + exit 1 + } + + trap 'echo "❗ Interrupted. Disabling shellhub-agent..."; $SUDO systemctl disable --now shellhub-agent; exit 1' INT + + echo "🔍 Checking service status..." + echo "Please wait for the service to start. This may take a few seconds." + echo "Press Ctrl+C to cancel the installation." + + timeout 15s sh -c ' + journalctl -f -u shellhub-agent --since "$(systemctl show -p ActiveEnterTimestamp shellhub-agent | cut -d= -f2)" | while read -r line; do + if echo "$line" | grep -Eq "Listening for connections"; then + echo "✅ Success: $line" + exit 0 + elif echo "$line" | grep -Eq "fatal"; then + echo "❌ Failure: $line" + exit 2 + fi + done + ' + + exit_code=$? + + if [ $exit_code -eq 124 ]; then + echo "❌ Timeout: Service took too long to start." + echo "Disabling shellhub-agent service..." + $SUDO systemctl disable --now shellhub-agent + uninstall + exit 1 + elif [ $exit_code -eq 2 ]; then + echo "Disabling shellhub-agent service..." + $SUDO systemctl disable --now shellhub-agent + uninstall + exit 1 + fi + + $SUDO rm -rf $TMP_DIR +} + +wsl_install() { + if ! 
systemctl show-environment >/dev/null 2>&1; then + printf "❌ ERROR: This install method requires systemd to be enabled.\n" + printf "Please refer to the following link for instructions on how to enable systemd:\n" + printf "https://learn.microsoft.com/en-us/windows/wsl/wsl-config#systemd-support\n" + printf "Once systemd is enabled, run this script again to complete the installation.\n" + exit 1 + fi + + if [ "$(wslinfo --networking-mode)" != "mirrored" ]; then + printf "❌ ERROR: WSL networking mode must be set to mirrored.\n" + printf "Please refer to the following link for instructions on how to set the networking mode:\n" + printf "https://learn.microsoft.com/en-us/windows/wsl/networking#mirrored-mode-networking\n" + printf "Once the networking mode is set to mirrored, run this script again to complete the installation.\n" + exit 1 + fi + + standalone_install } download() { - _DOWNLOAD_URL=$1 - _DOWNLOAD_OUTPUT=$2 + _DOWNLOAD_URL=$1 + _DOWNLOAD_OUTPUT=$2 - if type curl > /dev/null 2>&1; then - curl -fsSL $_DOWNLOAD_URL --output $_DOWNLOAD_OUTPUT - elif type wget > /dev/null 2>&1; then - wget -q -O $_DOWNLOAD_OUTPUT $_DOWNLOAD_URL - fi + if type curl >/dev/null 2>&1; then + curl -fsSL $_DOWNLOAD_URL --output $_DOWNLOAD_OUTPUT + elif type wget >/dev/null 2>&1; then + wget -q -O $_DOWNLOAD_OUTPUT $_DOWNLOAD_URL + fi } http_get() { - _HTTP_GET_URL=$1 + _HTTP_GET_URL=$1 - if type curl > /dev/null 2>&1; then - curl -sk $_HTTP_GET_URL - elif type wget > /dev/null 2>&1; then - wget -q -O - $_HTTP_GET_URL - fi + if type curl >/dev/null 2>&1; then + curl -sk $_HTTP_GET_URL + elif type wget >/dev/null 2>&1; then + wget -q -O - $_HTTP_GET_URL + fi } -[ -z "$TENANT_ID" ] && { echo "ERROR: TENANT_ID is missing."; exit 1; } +if [ "$(uname -s)" = "FreeBSD" ]; then + echo "👹 This system is running FreeBSD." + echo "❌ ERROR: Automatic installation is not supported on FreeBSD." 
+ echo + echo "Please refer to the ShellHub port at https://github.com/shellhub-io/ports" + exit 1 +fi + +[ -z "$TENANT_ID" ] && { + echo "ERROR: TENANT_ID is missing." + exit 1 +} SERVER_ADDRESS="${SERVER_ADDRESS:-https://cloud.shellhub.io}" TENANT_ID="${TENANT_ID}" INSTALL_METHOD="$INSTALL_METHOD" AGENT_VERSION="${AGENT_VERSION:-$(http_get $SERVER_ADDRESS/info | sed -E 's/.*"version":\s?"?([^,"]*)"?.*/\1/')}" AGENT_ARCH="$AGENT_ARCH" -CONTAINER_NAME="${CONTAINER_NAME:-shellhub}" RUNC_VERSION=${RUNC_VERSION:-v1.1.3} RUNC_ARCH=$RUNC_ARCH INSTALL_DIR="${INSTALL_DIR:-/opt/shellhub}" -TMP_DIR="${TMP_DIR:-`mktemp -d -t shellhub-installer-XXXXXX`}" - -if type docker > /dev/null 2>&1; then - while :; do - if $SUDO docker info > /dev/null 2>&1; then - INSTALL_METHOD="${INSTALL_METHOD:-docker}" - break - elif [ "$(id -u)" -ne 0 ]; then - [ -z "$SUDO" ] && SUDO="sudo" || { SUDO="" && break; } - fi - done -fi - -INSTALL_METHOD="${INSTALL_METHOD:-bundle}" +TMP_DIR="${TMP_DIR:-$(mktemp -d -t shellhub-installer-XXXXXX)}" # Auto detect arch if it has not already been set if [ -z "$AGENT_ARCH" ]; then - case `uname -m` in - x86_64) - AGENT_ARCH=amd64 - RUNC_ARCH=amd64 - ;; - armv6l) - AGENT_ARCH=arm32v6 - RUNC_ARCH=armel - ;; - armv7l) - AGENT_ARCH=arm32v7 - RUNC_ARCH=armhf - ;; - aarch64) - AGENT_ARCH=arm64v8 - RUNC_ARCH=arm64 - esac + case $(uname -m) in + x86_64) + AGENT_ARCH=amd64 + RUNC_ARCH=amd64 + ;; + armv6l) + AGENT_ARCH=arm32v6 + RUNC_ARCH=armel + ;; + armv7l) + AGENT_ARCH=arm32v7 + RUNC_ARCH=armhf + ;; + aarch64) + AGENT_ARCH=arm64v8 + RUNC_ARCH=arm64 + ;; + esac +fi + +echo "🛠️ ShellHub Agent Installer" +echo +if [ -z "$INSTALL_METHOD" ]; then + echo "This script will install the ShellHub agent on your system." + echo "It will auto-detect the best available installation method." + echo + echo "Installation methods (priority order):" + echo " 1. Docker - If Docker is installed and accessible in rootful mode" + echo " 2. 
Podman - If Podman is installed and accessible in rootful mode" + echo " 3. Snap - If Snap package manager is available" + echo " 4. WSL - If running in WSL2 with systemd and mirrored networking" + echo " 5. Standalone - Fallback method using runc and systemd" + echo +fi + +echo "⚙️ Detected settings:" +echo "- Server address: $SERVER_ADDRESS" +echo "- Tenant ID: $TENANT_ID" +echo "- Agent version: $AGENT_VERSION" +echo "- Architecture: $AGENT_ARCH" +[ -n "$INSTALL_METHOD" ] && echo "- Install method: $INSTALL_METHOD" +echo + +if [ -z "$INSTALL_METHOD" ] && type docker >/dev/null 2>&1; then + echo "🔍 Checking if Docker is available and accessible in rootful mode..." + + export DOCKER_HOST="${DOCKER_HOST:-unix:///var/run/docker.sock}" + + for prefix in "" "sudo"; do + if $prefix docker info >/dev/null 2>&1; then + SUDO=$prefix + INSTALL_METHOD="docker" + break + fi + done + + [ -z "$INSTALL_METHOD" ] && echo "ℹ️ Docker is not accessible in rootful mode." +fi + +if [ -z "$INSTALL_METHOD" ] && type podman >/dev/null 2>&1; then + echo "🔍 Checking if Podman is available and accessible in rootful mode..." + + export CONTAINER_HOST="${CONTAINER_HOST:-unix:///var/run/podman/podman.sock}" + + for prefix in "" "sudo"; do + if $prefix podman info >/dev/null 2>&1; then + SUDO=$prefix + INSTALL_METHOD="podman" + break + fi + done + + [ -z "$INSTALL_METHOD" ] && echo "ℹ️ Podman is not accessible in rootful mode." +fi + +if [ -z "$INSTALL_METHOD" ]; then + echo + echo "⚠️ NOTE: No recommended installation method was detected." + echo "⚠️ For best performance, easier updates, and better isolation, it is strongly recommended to use Docker or Podman." + echo "ℹ️ The installer will proceed with an alternative method (Snap, Standalone, or WSL), but these may have limitations." + echo +fi + +if [ -z "$INSTALL_METHOD" ] && type snap >/dev/null 2>&1; then + echo "🔍 Detected Snap package manager..." 
+ INSTALL_METHOD="snap" +fi + +# Check if running on WSL +if grep -qi Microsoft /proc/version; then + echo "🔍 Detected WSL environment..." + + WSL_EXE=$(find /mnt/*/Windows/System32/wsl.exe 2>/dev/null | head -n 1) + WSL_VERSION=$($WSL_EXE -v | tr -d '\0' | grep "WSL version" | awk -F'[ .:]+' '{print $3}') + + if [ -z "$WSL_VERSION" ] || [ "$WSL_VERSION" -lt 2 ]; then + echo "❌ ERROR: WSL version 2 is required to run ShellHub." + exit 1 + fi + + if grep -qi 'NAME="Ubuntu"' /etc/os-release; then + INSTALL_METHOD="wsl" + else + echo "❌ Error: Only Ubuntu is supported in WSL." + exit 1 + fi fi -echo "Install method: $INSTALL_METHOD" -echo "Agent version: $AGENT_VERSION" +[ -z "$INSTALL_METHOD" ] && INSTALL_METHOD="standalone" case "$INSTALL_METHOD" in - bundle) - bundle_install - ;; - docker) - docker_install - ;; - *) - echo "Install method not supported." - exit 1 +podman) + echo "🐳 Installing ShellHub using podman method..." + podman_install "$@" + ;; +docker) + echo "🐳 Installing ShellHub using docker method..." + docker_install "$@" + ;; +snap) + echo "📦 Installing ShellHub using snap method..." + snap_install + ;; +standalone) + echo "🐧 Installing ShellHub using standalone method..." + standalone_install + ;; +wsl) + echo "🪟 Installing ShellHub using WSL method..." + wsl_install + ;; +*) + echo "❌ Install method not supported." + exit 1 + ;; esac diff --git a/openapi/.air.toml b/openapi/.air.toml new file mode 100644 index 00000000000..9756dc3a77d --- /dev/null +++ b/openapi/.air.toml @@ -0,0 +1,27 @@ +root = "../" +tmp_dir = "tmp" + +[build] +pre_cmd = [] +cmd = "go build -gcflags=\"all=-N -l\" -o ./tmp/main ." 
+post_cmd = [] +bin = "" +full_bin = "" +args_bin = [] +delay = 500 +exclude_dir = ["assets", "tmp", "vendor", "testdata"] +exclude_file = [] +exclude_regex = ["_test.go"] +exclude_unchanged = false +follow_symlink = false +include_dir = [] +include_ext = ["html", "go", "yml", "yaml"] +include_file = [] +kill_delay = "0s" +log = "build-errors.log" +poll = false +poll_interval = 0 +rerun = false +rerun_delay = 500 +send_interrupt = false +stop_on_error = false diff --git a/openapi/.dockerignore b/openapi/.dockerignore new file mode 100644 index 00000000000..da79aa8066a --- /dev/null +++ b/openapi/.dockerignore @@ -0,0 +1,6 @@ +.vscode/ +.idea/ +.vim/ +node_modules/ +bundled/ +.env diff --git a/openapi/.gitignore b/openapi/.gitignore new file mode 100644 index 00000000000..2309f5e9e8f --- /dev/null +++ b/openapi/.gitignore @@ -0,0 +1,6 @@ +.vscode/ +.idea/ +.vim/ +bundled/ +.env +openapi.json diff --git a/openapi/.prettierignore b/openapi/.prettierignore new file mode 100644 index 00000000000..3ac3a14bd3f --- /dev/null +++ b/openapi/.prettierignore @@ -0,0 +1 @@ +bundled/ diff --git a/openapi/.prettierrc b/openapi/.prettierrc new file mode 100644 index 00000000000..613958b8ff9 --- /dev/null +++ b/openapi/.prettierrc @@ -0,0 +1,9 @@ +{ + "printWidth": 100, + "tabWidth": 2, + "useTabs": false, + "semi": true, + "singleQuote": true, + "trailingComma": "es5", + "arrowParens": "avoid" +} diff --git a/openapi/Dockerfile b/openapi/Dockerfile new file mode 100644 index 00000000000..8f20a379e06 --- /dev/null +++ b/openapi/Dockerfile @@ -0,0 +1,43 @@ +FROM golang:1.25-alpine3.22 AS base + +ARG GOPROXY +ENV GOPROXY ${GOPROXY} + +RUN apk add --no-cache openssl build-base nodejs npm openjdk11-jre git + +RUN npm install -g @openapitools/openapi-generator-cli + +RUN npm install -g @redocly/cli@1.0.0-beta.100 + +RUN npm install -g @stoplight/prism-cli@4.6.1 + +RUN npm install -g prettier@2.8.7 + +RUN go install github.com/air-verse/air@v1.62 && \ + go install 
github.com/go-delve/delve/cmd/dlv@v1.25 && \ + go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.1.6 && \ + go install github.com/vektra/mockery/v2/...@v2.53.2 + +FROM base AS development + +ARG GOPROXY + +WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub + +COPY ./go.mod ./ + +# As the OpenAPI directory doesn't depend on other modules in the repo, we don't need to copy go.sum. +# COPY ./go.sum ./ + +WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub/openapi + +COPY ./openapi/go.mod ./ + +# As the OpenAPI directory doesn't depend on other modules in the repo, we don't need to copy go.sum. +# COPY ./openapi/go.sum ./ + +RUN go mod download + +COPY ./openapi/entrypoint.sh /entrypoint.sh + +ENTRYPOINT ["/entrypoint.sh"] diff --git a/openapi/README.md b/openapi/README.md new file mode 100644 index 00000000000..c993774e7bb --- /dev/null +++ b/openapi/README.md @@ -0,0 +1,120 @@ +

+ shellhub OpenAPI spec +

+ +

ShellHub OpenAPI for Community, Cloud and Enterprise

+ +

+ + +

+ +## How it works? + +It is simple: when ShellHub is in development mode (with the `SHELLHUB_ENV` +set to `development`), it will enable, through the `gateway`, a [URL to preview the +OpenAPI](http://localhost/openapi/) reflecting changes in any OpenAPI file after a page reload. + +## How to use? + +### Preview + +Its usage is simple too: you just need to `up` the `shellhub-io/shellhub` containers in development +mode and access the URL. + +- Access the URL [http://localhost/openapi/preview](http://localhost/openapi) to check the preview. + +### Lint + +You can also lint the OpenAPI instances to check if everything is correct. You just need to run +the script called `lint` inside the `scripts` folder. + +```bash +Lint the OpenAPI instances. + +Usage: + ./scripts/lint + +Instances: + community Lint the community instance + cloud Lint the cloud instance + enterprise Lint the enterprise instance + +Options: + --help Display this help message +``` + +### Mock + +If you need to mock a ShellHub instance, the `scripts` folder has another script called `mock`, +which makes available a full service for you to test the API based on the instance you chose, at +[http://localhost/openapi/mock](http://localhost/openapi/mock). + +```bash +Mock the OpenAPI instances. + +Usage: + ./scripts/mock + +Instances: + community Mock the community instance + cloud Mock the cloud instance + enterprise Mock the enterprise instance + +Options: + --help Display this help message +``` + +### Proxy + +If you need to check if the API contract is right, you can use the script called `proxy`, +which makes available a full service for you to send requests to the API based on the instance +you chose, at [http://localhost/openapi/proxy](http://localhost/openapi/proxy). + +```bash +Proxy the OpenAPI instances.
+ +Usage: + ./scripts/proxy + +Instances: + community Proxy the community instance + cloud Proxy the cloud instance + enterprise Proxy the enterprise instance + +Options: + --help Display this help message + +``` + +### Generate + +You can generate a TypeScript client from the OpenAPI specification for any of the available instances using the +generate script. This will build the client and place it in the specified output directory. + +```bash +Generate the TypeScript client. + +Usage: + ./scripts/generate + +Options: + --help Display this help message +``` + +For example, to generate a client in the `../shellhub/ui/src/api/client/` directory, you would run: + +```bash +./scripts/generate ../shellhub/ui/src/api/client/ +``` + +The TypeScript client will be generated in the specified folder and can be used to interact with the ShellHub API. + +## Integrations + +### Community + +Today, the Community has a script called `openapi` to execute commands against the OpenAPI +spec provided by ShellHub `gateway` and this repository. + +

. . .

diff --git a/openapi/assets/logo.png b/openapi/assets/logo.png new file mode 100644 index 00000000000..3b7543130b8 Binary files /dev/null and b/openapi/assets/logo.png differ diff --git a/openapi/entrypoint.sh b/openapi/entrypoint.sh new file mode 100755 index 00000000000..5faff4ebff5 --- /dev/null +++ b/openapi/entrypoint.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env sh + +openapi-generator-cli version-manager set 7.16.0 + +air diff --git a/openapi/go.mod b/openapi/go.mod new file mode 100644 index 00000000000..822c3638194 --- /dev/null +++ b/openapi/go.mod @@ -0,0 +1,3 @@ +module github.com/shellhub-io/shellhub/openapi + +go 1.24.9 diff --git a/openapi/main.go b/openapi/main.go new file mode 100644 index 00000000000..2888bb70a3c --- /dev/null +++ b/openapi/main.go @@ -0,0 +1,30 @@ +package main + +import ( + "fmt" + "log" + "net/http" + "os/exec" +) + +func main() { + path := "spec/openapi.yaml" + + fmt.Printf("info: generating OpenAPI server from %s\n", path) + + if err := exec.Command("redocly", "bundle", path, "-o", "static/openapi.json").Run(); err != nil { //nolint:gosec + log.Fatalf("error: failed to bundle the openapi spec: %v", err) + } + + mux := http.NewServeMux() + + // NOTE: Gateway proxy to serve the OpenAPI spec and the Redoc UI. directly on the /openapi path. 
+ fileServer := http.FileServer(http.Dir("./static")) + mux.Handle("/openapi/", http.StripPrefix("/openapi/", fileServer)) + + log.Printf("info: OpenAPI server started\n") + + if err := http.ListenAndServe(":8080", mux); err != nil { //nolint:gosec + log.Fatal(err) + } +} diff --git a/ui/openapitools.json b/openapi/openapitools.json similarity index 84% rename from ui/openapitools.json rename to openapi/openapitools.json index 92036ec7d89..8244df4250e 100644 --- a/ui/openapitools.json +++ b/openapi/openapitools.json @@ -2,6 +2,6 @@ "$schema": "./node_modules/@openapitools/openapi-generator-cli/config.schema.json", "spaces": 2, "generator-cli": { - "version": "6.0.0" + "version": "7.16.0" } } diff --git a/openapi/redocly.yaml b/openapi/redocly.yaml new file mode 100644 index 00000000000..7c4c6664ee2 --- /dev/null +++ b/openapi/redocly.yaml @@ -0,0 +1,33 @@ +# This is the Redocly configuration file, and it is used by Redocly CLI and other Redocly apps to control their behavior +# — from the strictness of the lint command, to how Redocly renders your docs. +# +# - Workflows uses it in the API registry to manage your APIs and control advanced features like region and link resolution. +# - Workflows and on-premise tools use it to apply your features and theme when building API docs. +# - Redocly's VS Code extension uses it for linting criteria, to apply custom settings for live documentation previews, +# and to identify the path API definition root files. +# +# https://redocly.com/docs/cli/configuration/ +organization: shellhub + +# This extends the recommended configuration values. 
+# https://redocly.com/docs/cli/rules/#recommended-config +extends: + - recommended + +rules: + no-ambiguous-paths: off + +apis: + community@v1: + root: ./spec/community-openapi.yaml + cloud@v1: + root: ./spec/cloud-openapi.yaml + enterprise@v1: + root: ./spec/enterprise-openapi.yaml + +features.openapi: + schemaExpansionLevel: 2 + generateCodeSamples: + languages: + - lang: curl + - lang: Go diff --git a/openapi/scripts/generate b/openapi/scripts/generate new file mode 100755 index 00000000000..87217a1a809 --- /dev/null +++ b/openapi/scripts/generate @@ -0,0 +1,51 @@ +#!/usr/bin/env sh + +usage() { + cat < +Options: + --help Display this help message +EOF + exit 1 +} + +generate() { + output_dir=$1 + + if [ "$output_dir" == "-h" ] || [ "$output_dir" == "--help" ]; then + usage + return 0 + fi + + if [ -z "$output_dir" ]; then + echo "Error: No output directory specified." + usage + return 1 + fi + + echo "Generating TypeScript client to $output_dir..." + + ./bin/docker-compose \ + run \ + --rm \ + -v "$output_dir":/tmp/client \ + --entrypoint "openapi-generator-cli generate --skip-validate-spec -i ./static/openapi.json -g typescript-axios -o /tmp/client --inline-schema-options RESOLVE_INLINE_ENUMS=true" \ + openapi + + if [ $? -ne 0 ]; then + echo "Error: OpenAPI generation failed." + echo "Check if the OpenAPI container is running and the spec is valid." + + return 1 + fi + + echo "Generation completed." 
+} + +main() { + generate $1 +} + +main $@ diff --git a/openapi/scripts/lint b/openapi/scripts/lint new file mode 100755 index 00000000000..f161bb520f0 --- /dev/null +++ b/openapi/scripts/lint @@ -0,0 +1,49 @@ +#!/usr/bin/env sh + +usage() { + cat < + +Instances: + community Lint the community instance + cloud Lint the cloud instance + enterprise Lint the enterprise instance + +Options: + --help Display this help message + +EOF + exit 1 +} + +main() { + instances="community cloud enterprise" + instance=$1 + + if [ "$1" == "--help" ]; then + usage + exit 0 + fi + + for i in $instances; do + if [ "$i" == "$instance" ]; then + echo "Linting $instance instance" + + ./bin/docker-compose exec openapi redocly lint ${@:2} spec/$instance-openapi.yaml --skip-rule no-ambiguous-paths operation-4xx-response + if [ $? -ne 0 ]; then + echo "error: $instance OpenAPI instance failed linting." + exit 1 + fi + + exit 0 + fi + done + + usage + exit 0 +} + +main $@ diff --git a/openapi/scripts/mock b/openapi/scripts/mock new file mode 100755 index 00000000000..44c74a94c88 --- /dev/null +++ b/openapi/scripts/mock @@ -0,0 +1,55 @@ +#!/usr/bin/env sh + +usage() { + cat < + +Instances: + community Mock the community instance + cloud Mock the cloud instance + enterprise Mock the enterprise instance + +Options: + --help Display this help message + +EOF + exit 1 +} + +mock() { + local instance=$1 + shift + local args="$@" + + ./bin/docker-compose exec openapi redocly bundle spec/$instance-openapi.yaml -o /tmp/openapi/mock/openapi.json + ./bin/docker-compose exec openapi prism mock -h 0.0.0.0 -p 4010 $args /tmp/openapi/mock/openapi.json +} + +main() { + instances="community cloud enterprise" + instance=$1 + shift + args="$@" + + if [ "$1" = "--help" ]; then + usage + exit 0 + fi + + for i in $instances; do + if [ "$i" = "$instance" ]; then + echo "mocking and watching for changes in $instance instance" + + mock $instance $args + exit $? 
+ fi + done + + usage + exit 0 +} + +main "$@" diff --git a/openapi/scripts/proxy b/openapi/scripts/proxy new file mode 100755 index 00000000000..f5648fcd82c --- /dev/null +++ b/openapi/scripts/proxy @@ -0,0 +1,55 @@ +#!/usr/bin/env sh + +usage() { + cat < + +Instances: + community Proxy the community instance + cloud Proxy the cloud instance + enterprise Proxy the enterprise instance + +Options: + --help Display this help message + +EOF + exit 1 +} + +proxy() { + local instance=$1 + shift + local args="$@" + + ./bin/docker-compose exec openapi redocly bundle spec/$instance-openapi.yaml -o /tmp/openapi/proxy/openapi.json + ./bin/docker-compose exec openapi prism proxy -h 0.0.0.0 -p 4020 --errors $args /tmp/openapi/proxy/openapi.json http://gateway:80 +} + +main() { + instances="community cloud enterprise" + instance=$1 + shift + args="$@" + + if [ "$1" = "--help" ]; then # Fixed: use POSIX '=' instead of '==' + usage + exit 0 + fi + + for i in $instances; do + if [ "$i" = "$instance" ]; then # Fixed: use POSIX '=' instead of '==' + echo "proxy and watching for changes in $instance instance" + + proxy $instance $args + exit $? + fi + done + + usage + exit 0 +} + +main "$@" diff --git a/openapi/spec/cloud-openapi.yaml b/openapi/spec/cloud-openapi.yaml new file mode 100644 index 00000000000..65b84cbf8eb --- /dev/null +++ b/openapi/spec/cloud-openapi.yaml @@ -0,0 +1,138 @@ +openapi: 3.0.3 +info: + title: ShellHub Cloud OpenAPI + description: | + > NOTICE: THE API IS NOT STABLE YET; ERRORS AND INCONSISTENCIES MAY OCCUR. + + ShellHub Cloud OpenAPI specification. + + It documents all routes provided by ShellHub Cloud. + contact: + email: contato@ossystems.com.br + license: + name: Apache License 2.0 + url: https://github.com/shellhub-io/shellhub/blob/master/LICENSE.md + version: 0.20.0 +servers: + - url: / + description: ShellHub server as a Cloud instance.
+components: + securitySchemes: + $ref: ./components/schemas/security.yaml +tags: + - name: cloud + description: Routes provided by ShellHub Cloud API. + - name: sessions + description: Routes related to session resource. + - name: users + description: Routes related to users resource. + - name: rules + description: Routes related to firewall rules resource + - name: announcements + description: Routes related to announcements resource + - name: mfa + description: Routes related to MFA + - name: api-keys + description: | + An API key is a unique identifier used to access protected endpoints. It + has a defined lifespan, is associated with a namespace, and cannot be + used to authenticate user routes. Typically, it replaces login-based + authentication when automating processes. + + To utilize an API key, it must be included in the `X-API-KEY` header. API + keys are preferred over JWT tokens and will be used even if one is + provided. + + Except for `GET` endpoints, API key-related routes cannot be + authenticated with an API key. + - name: tunnels + description: Routes related to tunnels resource. + - name: web-endpoints + description: Routes related to web-endpoints resource. 
+ +paths: + $ref: community-openapi.yaml#/paths + /api/sessions/{uid}/close: + $ref: paths/api@sessions@{uid}@close.yaml + /api/sessions/{uid}/records/{seat}: + $ref: paths/api@sessions@{uid}@records@{seat}.yaml + /api/firewall/rules: + $ref: paths/api@firewall@rules.yaml + /api/firewall/rules/{id}: + $ref: paths/api@firewall@rules@{id}.yaml + /api/register: + $ref: paths/api@register.yaml + /api/user/resend_email: + $ref: paths/api@user@resend_email.yaml + /api/user/{uid}/update_password: + $ref: paths/api@user@{uid}@update_password.yaml + /api/user/validation_account: + $ref: paths/api@user@validation_account.yaml + /api/user/recover_password: + $ref: paths/api@user@recover_password.yaml + /api/billing/customer: + $ref: paths/api@billing@customer.yaml + /api/billing/subscription: + $ref: paths/api@billing@subscription.yaml + /api/billing/paymentmethod/attach: + $ref: paths/api@billing@paymentmethod@attach.yaml + /api/billing/paymentmethod/detach: + $ref: paths/api@billing@paymentmethod@detach.yaml + /api/billing/paymentmethod/default: + $ref: paths/api@billing@paymentmethod@default.yaml + /api/billing/device-choice: + $ref: paths/api@billing@device-choice.yaml + /api/billing/devices-most-used: + $ref: paths/api@billing@devices-most-used.yaml + /api/billing/report: + $ref: paths/api@billing@report.yaml + /api/billing/evaluate: + $ref: paths/api@billing@evaluate.yaml + /api/user/saml/auth: + $ref: paths/api@user@saml@auth.yaml + /api/user/mfa/auth: + $ref: paths/api@user@mfa@auth.yaml + /api/user/mfa/recover: + $ref: paths/api@user@mfa@recovery.yaml + /api/user/mfa/reset: + $ref: paths/api@user@mfa@reset.yaml + /api/user/mfa/generate: + $ref: paths/api@user@mfa@generate.yaml + /api/user/mfa/enable: + $ref: paths/api@user@mfa@enable.yaml + /api/user/mfa/disable: + $ref: paths/api@user@mfa@disable.yaml + /api/user/mfa/reset/{user-id}: + $ref: paths/api@user@mfa@reset@{user-id}.yaml + /api/user: + $ref: paths/api@user.yaml + /api/connector: + $ref: 
paths/api@connector.yaml + /api/connector/{uid}: + $ref: paths/api@connector@{uid}.yaml + /api/connector/{uid}/info: + $ref: paths/api@connector@{uid}@info.yaml + /api/namespaces/{tenant}/invitations/links: + $ref: paths/api@namespaces@{tenant}@invitations@links.yaml + /api/namespaces/{tenant}/members/{id}/accept-invite: + $ref: paths/api@namespaces@{tenant}@members@{id}@accept-invite.yaml + /api/namespaces/{tenant}/invitations/accept: + $ref: paths/api@namespaces@{tenant}@invitations@accept.yaml + /api/namespaces/{tenant}/invitations/decline: + $ref: paths/api@namespaces@{tenant}@invitations@decline.yaml + /api/namespaces/{tenant}/invitations/{user-id}: + $ref: paths/api@namespaces@{tenant}@invitations@{user-id}.yaml + /api/namespaces/{tenant}/invitations: + $ref: paths/api@namespaces@{tenant}@invitations.yaml + /api/users/invitations: + $ref: paths/api@users@invitations.yaml + /api/namespaces/{tenant}/support: + $ref: paths/api@namespaces@{tenant}@support.yaml + /api/web-endpoints: + $ref: paths/api@web-endpoints.yaml + /api/web-endpoints/{address}: + $ref: paths/api@web-endpoints@{address}.yaml + /api/devices/{uid}/tunnels: + $ref: paths/api@devices@{uid}@tunnels.yaml + /api/devices/{uid}/tunnels/{address}: + $ref: paths/api@devices@{uid}@tunnels@{address}.yaml diff --git a/openapi/spec/community-openapi.yaml b/openapi/spec/community-openapi.yaml new file mode 100644 index 00000000000..61a0478a232 --- /dev/null +++ b/openapi/spec/community-openapi.yaml @@ -0,0 +1,139 @@ +openapi: 3.0.3 +info: + title: ShellHub OpenAPI + description: | + > THE API IS NOT STABLE YET; ERRORS AND INCONSISTENCIES MAY OCCUR. + + This is the OpenAPI specification for the ShellHub community version. It documents the parameters and bodies for + performing HTTP requests to the ShellHub server endpoints related to users, namespaces, members, devices, tags, SSH, + sessions, etc. 
+ + These endpoints require a JSON Web Token (JWT) as their security scheme, which means you need to send, with almost every + request, an HTTP header called `Authorization` with the `bearer` token. To obtain this token, use the `/api/login` + route, filling in its request body; it returns that token with some essential information about the user who logged + in. + contact: + name: ShellHub contact address. + email: contato@ossystems.com.br + license: + name: Apache License 2.0 + url: https://github.com/shellhub-io/shellhub/blob/master/LICENSE.md + version: 0.20.0 +servers: + - url: / + description: ShellHub server. +components: + securitySchemes: + $ref: ./components/schemas/security.yaml +tags: + - name: internal + description: Requests executed internally by ShellHub server. + - name: external + description: Requests executed by the ShellHub user. + - name: users + description: Routes related to user resource. + - name: devices + description: Routes related to device resource. + - name: containers + description: Routes related to containers resource. + - name: ssh + description: Routes related to SSH resource. + - name: api-keys + description: | + An API key is a unique identifier used to access protected endpoints. It + has a defined lifespan, is associated with a namespace, and cannot be + used to authenticate user routes. Typically, it replaces login-based + authentication when automating processes. + + To utilize an API key, it must be included in the `X-API-KEY` header. API + keys are preferred over JWT tokens and will be used even if one is + provided. + + Except for `GET` endpoints, API key-related routes cannot be + authenticated with an API key. + - name: system + description: Routes related to running instance. 
+ +paths: + /info: + $ref: paths/api@info.yaml + /api/login: + $ref: paths/api@login.yaml + /api/auth/user: + $ref: paths/api@auth@user.yaml + /api/auth/ssh: + $ref: paths/api@auth@ssh.yaml + /api/auth/token/{tenant}: + $ref: paths/api@auth@token@{tenant}.yaml + /api/token/{tenant}: + $ref: paths/api@token@{tenant}.yaml + /api/devices/{uid}/accept: + $ref: paths/api@devices@{uid}@accept.yaml + /api/users: + $ref: paths/api@users.yaml + /api/users/{id}/data: + $ref: paths/api@users@{id}@data.yaml + /api/users/{id}/password: + $ref: paths/api@users@{id}@password.yaml + /api/users/security/{tenant}: + $ref: paths/api@users@security@{tenant}.yaml + /api/users/security: + $ref: paths/api@users@security.yaml + /api/devices: + $ref: paths/api@devices.yaml + /api/devices/{uid}: + $ref: paths/api@devices@{uid}.yaml + /api/devices/resolve: + $ref: paths/api@devices@resolve.yaml + /api/devices/{uid}/{status}: + $ref: paths/api@devices@{uid}@{status}.yaml + /internal/devices/{uid}/offline: + $ref: paths/api@devices@{uid}@offline.yaml + /api/sessions: + $ref: paths/api@sessions.yaml + /api/sessions/{uid}: + $ref: paths/api@sessions@{uid}.yaml + /api/sshkeys/public-keys: + $ref: paths/api@sshkeys@public-keys.yaml + /api/sshkeys/public-keys/{fingerprint}: + $ref: paths/api@sshkeys@public-keys@{fingerprint}.yaml + /api/stats: + $ref: paths/api@stats.yaml + /api/namespaces: + $ref: paths/api@namespaces.yaml + /api/namespaces/{tenant}: + $ref: paths/api@namespaces@{tenant}.yaml + /api/namespaces/{tenant}/members: + $ref: paths/api@namespaces@{tenant}@members.yaml + /api/namespaces/{tenant}/members/{uid}: + $ref: paths/api@namespaces@{tenant}@members@{uid}.yaml + /api/namespaces/api-key: + $ref: paths/api@namespaces@api-key.yaml + /api/namespaces/api-key/{key}: + $ref: paths/api@namespaces@api-key@{key}.yaml + /api/tags: + $ref: paths/api@tags.yaml + /api/tags/{name}: + $ref: paths/api@tags@{name}.yaml + /api/devices/{uid}/tags/{name}: + $ref: paths/api@devices@{uid}@tags@{name}.yaml 
+ /api/namespaces/{tenant}/tags: + $ref: paths/api@namespaces@{tenant}@tags.yaml + /api/namespaces/{tenant}/tags/{name}: + $ref: paths/api@namespaces@{tenant}@tags@{name}.yaml + /api/namespaces/{tenant}/devices/{uid}/tags/{name}: + $ref: paths/api@namespaces@{tenant}@devices@{uid}@tags@{name}.yaml + /api/namespaces/{tenant}/containers/{uid}/tags/{name}: + $ref: paths/api@namespaces@{tenant}@containers@{uid}@tags@{name}.yaml + /api/announcements: + $ref: paths/api@announcements.yaml + /api/announcements/{uuid}: + $ref: paths/api@announcements@{uuid}.yaml + /api/containers: + $ref: paths/api@containers.yaml + /api/containers/{uid}: + $ref: paths/api@containers@{uid}.yaml + /api/containers/{uid}/{status}: + $ref: paths/api@containers@{uid}@{status}.yaml + /api/setup: + $ref: paths/api@setup.yaml diff --git a/openapi/spec/components/parameters/path/announcementUUID.yaml b/openapi/spec/components/parameters/path/announcementUUID.yaml new file mode 100644 index 00000000000..508aed6625f --- /dev/null +++ b/openapi/spec/components/parameters/path/announcementUUID.yaml @@ -0,0 +1,5 @@ +name: uuid +schema: + $ref: ../../schemas/announcementUUID.yaml +required: true +in: path diff --git a/openapi/spec/components/parameters/path/customToken.yaml b/openapi/spec/components/parameters/path/customToken.yaml new file mode 100644 index 00000000000..165037d6265 --- /dev/null +++ b/openapi/spec/components/parameters/path/customToken.yaml @@ -0,0 +1,6 @@ +name: authCustomToken +description: custom token +schema: + $ref: ../../schemas/customToken.yaml +required: true +in: path diff --git a/openapi/spec/components/parameters/path/deviceStatusPath.yaml b/openapi/spec/components/parameters/path/deviceStatusPath.yaml new file mode 100644 index 00000000000..a15f51f21b8 --- /dev/null +++ b/openapi/spec/components/parameters/path/deviceStatusPath.yaml @@ -0,0 +1,6 @@ +name: status +description: Device's status +schema: + $ref: ../../schemas/deviceStatus.yaml +required: true +in: path diff 
--git a/openapi/spec/components/parameters/path/deviceTagPath.yaml b/openapi/spec/components/parameters/path/deviceTagPath.yaml new file mode 100644 index 00000000000..cac6233f337 --- /dev/null +++ b/openapi/spec/components/parameters/path/deviceTagPath.yaml @@ -0,0 +1,6 @@ +name: tag +description: Device's tag name +schema: + $ref: ../../schemas/tag.yaml +required: true +in: path diff --git a/openapi/spec/components/parameters/path/deviceUIDPath.yaml b/openapi/spec/components/parameters/path/deviceUIDPath.yaml new file mode 100644 index 00000000000..3b7cfb7b1c3 --- /dev/null +++ b/openapi/spec/components/parameters/path/deviceUIDPath.yaml @@ -0,0 +1,6 @@ +name: uid +description: Device's UID +schema: + $ref: ../../schemas/deviceUID.yaml +required: true +in: path diff --git a/openapi/spec/components/parameters/path/namespaceMemberIDPath.yaml b/openapi/spec/components/parameters/path/namespaceMemberIDPath.yaml new file mode 100644 index 00000000000..3b26287fc5f --- /dev/null +++ b/openapi/spec/components/parameters/path/namespaceMemberIDPath.yaml @@ -0,0 +1,6 @@ +name: uid +description: Member's ID +schema: + type: string +required: true +in: path diff --git a/openapi/spec/components/parameters/path/namespaceTenantIDPath.yaml b/openapi/spec/components/parameters/path/namespaceTenantIDPath.yaml new file mode 100644 index 00000000000..33b641a7e5a --- /dev/null +++ b/openapi/spec/components/parameters/path/namespaceTenantIDPath.yaml @@ -0,0 +1,6 @@ +name: tenant +description: Namespace's tenant ID +schema: + $ref: ../../schemas/namespaceTenantID.yaml +required: true +in: path diff --git a/openapi/spec/components/parameters/path/publicKeyFingerprintPath.yaml b/openapi/spec/components/parameters/path/publicKeyFingerprintPath.yaml new file mode 100644 index 00000000000..7b59d1ba908 --- /dev/null +++ b/openapi/spec/components/parameters/path/publicKeyFingerprintPath.yaml @@ -0,0 +1,6 @@ +name: fingerprint +description: Public key's fingerprint. 
+schema: + $ref: ../../schemas/publickKeyFingerprint.yaml +required: true +in: path diff --git a/openapi/spec/components/parameters/path/sessionUIDPath.yaml b/openapi/spec/components/parameters/path/sessionUIDPath.yaml new file mode 100644 index 00000000000..cc1f59eccd4 --- /dev/null +++ b/openapi/spec/components/parameters/path/sessionUIDPath.yaml @@ -0,0 +1,5 @@ +name: uid +schema: + $ref: ../../schemas/sessionUID.yaml +required: true +in: path diff --git a/openapi/spec/components/parameters/path/tagPath.yaml b/openapi/spec/components/parameters/path/tagPath.yaml new file mode 100644 index 00000000000..9835578fd90 --- /dev/null +++ b/openapi/spec/components/parameters/path/tagPath.yaml @@ -0,0 +1,6 @@ +name: tag +description: Tag's name. +schema: + $ref: ../../schemas/tag.yaml +required: true +in: path diff --git a/openapi/spec/components/parameters/path/tunnelAddressPath.yaml b/openapi/spec/components/parameters/path/tunnelAddressPath.yaml new file mode 100644 index 00000000000..90ae074095c --- /dev/null +++ b/openapi/spec/components/parameters/path/tunnelAddressPath.yaml @@ -0,0 +1,6 @@ +name: address +description: Tunnel's address +schema: + $ref: ../../schemas/tunnelAddress.yaml +required: true +in: path diff --git a/openapi/spec/components/parameters/query/filterQuery.yaml b/openapi/spec/components/parameters/query/filterQuery.yaml new file mode 100644 index 00000000000..14f39c38918 --- /dev/null +++ b/openapi/spec/components/parameters/query/filterQuery.yaml @@ -0,0 +1,75 @@ +name: filter +description: | + The filter field receives a JSON object encoded as a base64 string to limit a search. 
+ + The encoded JSON must follow these interfaces: + ```typescript + interface ParamProperty { + name: string; + operator: "contains" | "eq" | "bool" | "gt" | "lt"; + value: string; + } + + interface ParamOperator { + name: "and" | "or"; + } + + interface Filter { + type: "property" | "operator"; + param: ParamOperator | ParamProperty; + } + + interface FilterList { + Filters: Array; + } + + ``` + + ## Examples + + This is an example of a filter to get only the resources whose property "confirmed" is "true" + ```json + [ + { + "type": "property", + "params": { + "name": "confirmed", + "operator": "bool", + "value": "true" + } + } + ] + ``` + + This one filters resources by the property "id" inside the "info" structure when it is equal to "manjaro" and the "online" property is set to "true" + ```json + [ + { + "type": "property", + "params": { + "name": "info.id", + "operator": "eq", + "value": "manjaro" + } + }, + { + "type": "property", + "params": { + "name": "online", + "operator": "bool", + "value": "true" + } + }, + { + "type": "operator", + "params": { + "name": "and" + } + } + ] + ``` +schema: + type: string + format: byte +required: false +in: query diff --git a/openapi/spec/components/parameters/query/orderByQuery.yaml b/openapi/spec/components/parameters/query/orderByQuery.yaml new file mode 100644 index 00000000000..63924883796 --- /dev/null +++ b/openapi/spec/components/parameters/query/orderByQuery.yaml @@ -0,0 +1,10 @@ +name: order_by +in: query +description: Sort order (asc or desc) +required: false +schema: + type: string + enum: + - asc + - desc + default: desc diff --git a/openapi/spec/components/parameters/query/pageQuery.yaml b/openapi/spec/components/parameters/query/pageQuery.yaml new file mode 100644 index 00000000000..3744d543289 --- /dev/null +++ b/openapi/spec/components/parameters/query/pageQuery.yaml @@ -0,0 +1,7 @@ +name: page +description: Page number +schema: + type: integer + minimum: 1 + default: 1 +in: query diff --git 
a/openapi/spec/components/parameters/query/perPageQuery.yaml b/openapi/spec/components/parameters/query/perPageQuery.yaml new file mode 100644 index 00000000000..7bd2e981621 --- /dev/null +++ b/openapi/spec/components/parameters/query/perPageQuery.yaml @@ -0,0 +1,8 @@ +name: per_page +description: Items per page +schema: + type: integer + minimum: 1 + maximum: 100 + default: 10 +in: query diff --git a/openapi/spec/components/parameters/query/sortByQuery.yaml b/openapi/spec/components/parameters/query/sortByQuery.yaml new file mode 100644 index 00000000000..9084e2235c4 --- /dev/null +++ b/openapi/spec/components/parameters/query/sortByQuery.yaml @@ -0,0 +1,12 @@ +name: sort_by +in: query +description: Field to sort by +required: false +schema: + type: string + enum: + - created_at + - updated_at + - address + - uid + default: created_at diff --git a/openapi/spec/components/responses/200.yaml b/openapi/spec/components/responses/200.yaml new file mode 100644 index 00000000000..b037c298fea --- /dev/null +++ b/openapi/spec/components/responses/200.yaml @@ -0,0 +1 @@ +description: Success diff --git a/openapi/spec/components/responses/400.yaml b/openapi/spec/components/responses/400.yaml new file mode 100644 index 00000000000..acb8b4290ad --- /dev/null +++ b/openapi/spec/components/responses/400.yaml @@ -0,0 +1 @@ +description: Bad request diff --git a/openapi/spec/components/responses/401.yaml b/openapi/spec/components/responses/401.yaml new file mode 100644 index 00000000000..40f991fddc7 --- /dev/null +++ b/openapi/spec/components/responses/401.yaml @@ -0,0 +1,11 @@ +description: Unauthorized +content: + application/json: + schema: + type: object + properties: + message: + description: Error message + type: string + example: + message: missing or malformed jwt or API token diff --git a/openapi/spec/components/responses/402.yaml b/openapi/spec/components/responses/402.yaml new file mode 100644 index 00000000000..5e394d44525 --- /dev/null +++ 
b/openapi/spec/components/responses/402.yaml @@ -0,0 +1 @@ +description: Payment required diff --git a/openapi/spec/components/responses/403.yaml b/openapi/spec/components/responses/403.yaml new file mode 100644 index 00000000000..d977831a01f --- /dev/null +++ b/openapi/spec/components/responses/403.yaml @@ -0,0 +1 @@ +description: Forbidden diff --git a/openapi/spec/components/responses/404.yaml b/openapi/spec/components/responses/404.yaml new file mode 100644 index 00000000000..9f8135231f8 --- /dev/null +++ b/openapi/spec/components/responses/404.yaml @@ -0,0 +1 @@ +description: Not found diff --git a/openapi/spec/components/responses/406.yaml b/openapi/spec/components/responses/406.yaml new file mode 100644 index 00000000000..39cd670edb2 --- /dev/null +++ b/openapi/spec/components/responses/406.yaml @@ -0,0 +1 @@ +description: Not Acceptable diff --git a/openapi/spec/components/responses/409.yaml b/openapi/spec/components/responses/409.yaml new file mode 100644 index 00000000000..3399c0afe52 --- /dev/null +++ b/openapi/spec/components/responses/409.yaml @@ -0,0 +1 @@ +description: Conflict diff --git a/openapi/spec/components/responses/500.yaml b/openapi/spec/components/responses/500.yaml new file mode 100644 index 00000000000..966b8805d17 --- /dev/null +++ b/openapi/spec/components/responses/500.yaml @@ -0,0 +1,11 @@ +description: Internal error +content: + application/json: + schema: + type: object + properties: + message: + description: Error message. 
+ type: string + example: + message: Internal Server Error diff --git a/openapi/spec/components/responses/conflictFields.yaml b/openapi/spec/components/responses/conflictFields.yaml new file mode 100644 index 00000000000..d65004fa15f --- /dev/null +++ b/openapi/spec/components/responses/conflictFields.yaml @@ -0,0 +1,10 @@ +description: Conflict Fields +content: + application/json: + schema: + type: array + items: + type: string + example: + - username + - email diff --git a/openapi/spec/components/responses/invalidFields.yaml b/openapi/spec/components/responses/invalidFields.yaml new file mode 100644 index 00000000000..b80f7258149 --- /dev/null +++ b/openapi/spec/components/responses/invalidFields.yaml @@ -0,0 +1,10 @@ +description: Invalid Fields +content: + application/json: + schema: + type: array + items: + type: string + example: + - username + - email diff --git a/openapi/spec/components/schemas/announcement.yaml b/openapi/spec/components/schemas/announcement.yaml new file mode 100644 index 00000000000..2dfccfa22a6 --- /dev/null +++ b/openapi/spec/components/schemas/announcement.yaml @@ -0,0 +1,12 @@ +description: Announcement. +type: object +properties: + uuid: + $ref: announcementUUID.yaml + title: + $ref: announcementTitle.yaml + content: + $ref: announcementContent.yaml + date: + type: string + format: date-time diff --git a/openapi/spec/components/schemas/announcementContent.yaml b/openapi/spec/components/schemas/announcementContent.yaml new file mode 100644 index 00000000000..b14e46f6f5e --- /dev/null +++ b/openapi/spec/components/schemas/announcementContent.yaml @@ -0,0 +1,74 @@ +description: Announcement description. +type: string +example: |- + # Pendent ignarus + + ## Inmittitur insula praecipiunt viro odiumque campis securus + + Lorem markdownum quamvis Sipylus sanguine, *feramus deam* virtus nosse clamor + superbia me vivit, lumen. Quid clamore: hi quem. Dea dedit coram, patriam crura + dum necis de exanimem. 
Sub habendus, iubet gentis transformat iter; latet nemus + es somnum praecepta saxa. + + if (firewire(daw, word_southbridge)) { + art.api.displayHdtvRom(access * minimize_hover_exif, + animated_redundancy); + } + var drop = real; + var pci_mtu_binary = dropPartitionGigabit.open_sound_computer(metal - + repositoryUrl, directx_memory + grep_remote_ram, + zifFlopsDevice.bar.wimax(ribbonVpiSip, commerce, 65)); + if (web) { + sectorThird = ipComputerCharacter(5, pppoe_raw_brouter( + vpnAnimatedSubnet)); + } else { + volumeRate += zebibyte_wired * responsiveIo; + vram_undo += function_ppc_ole.leaf_graphic(broadbandCleanUdp); + pci.compression_source_adsl += frozenServerNvram(tape_io, transistor); + } + + ## Operosa si inque + + Iacuere ut frontem *primum* nympha nec, ex mihi; nec in! Poma dolori incomitata + Nec. Sua senex quod, flavescunt libro nostris cum. + + 1. Subiere bracchia ergo tumulavit namque inania + 2. Ictu bellum + 3. Feratis matrumque inritata Ophionides fila agricolis quique + + ## Quaeritis sitim + + Velatus quae prodest manet reparabile antraque Pallas viridique + [ducentem](http://modo-sic.net/) arcus. Alta per, cum. **Est** vinctum animae + anima monte Propoetides praevitiat aliter montibus sua colligit [vasta + rabiemque](http://mox.com/pereunt) habet. Edere iste aut, peregrina feramus, + iusserat sibilat huius, et. + + - Corpora a sequor muneris in pietas abdidit + - Terribili tantum + - Ubi potitur aberat aut animi quaesita manat + - Eas duros valerem convivia et videt mirator + - Sonitum et ait digna persequitur Trachinia est + - Ipse corpora et totis temptaretque neque + + ## Iterum in tinnulaque frondes culpae spumantis + + Sub Mavortia illa nudos obstupuere **quantum** secum mitia Apolline tumere, non + adhuc audistis ferre. Accepto vocabant movit spe; vere medii adloquitur vano + pernocte, everterit harundine simul, sortis causa. 
Animam [duo + causa](http://augustumme.com/puppim) mihi solent in ego quaecumque tarda, et + quas: aere quae Avernae Amphitryoniaden. + + > Ventos fixit culpam vocavit iubasque. Sic venit tam ferinas freto pallescere + > vadit: tamen Editus nil te, habet tantum minatur species et enixa. Conplevit + > tenens Ladon, fugit studioque ausae Cerberon non maiora, tollit adhuc ait in + > quae Atlantiades altae pulcherrime. Anima signa membra cursus, grandior + > morientis fidem. + + Maduere in lacrimis in ultima verbaque pelle. Cervus suas tauro eripitur traho + scelerata Hippason et est posse exuit quem per possent valet Alcmena annis et, + ut. Utrumque nam nitor sua ultima ferox liquerunt stetimusque **Semeles ianua**. + Illi poma implesset sive: inde sub contingere veneratur salientis pectore mirata + et Neptunum veniet turis exitium. Quem sensit iam reclusa plus resurgere nescio + miratur ibat flamma [tuentes Minervae + fortibus](http://intervitae.org/caputex.aspx) canebat et. diff --git a/openapi/spec/components/schemas/announcementShort.yaml b/openapi/spec/components/schemas/announcementShort.yaml new file mode 100644 index 00000000000..037304f75d2 --- /dev/null +++ b/openapi/spec/components/schemas/announcementShort.yaml @@ -0,0 +1,12 @@ +type: object +properties: + uuid: + $ref: announcementUUID.yaml + title: + $ref: announcementTitle.yaml + date: + type: string + format: date-time +example: + title: Lorem ipsum dolor sit amet, consectetur adipiscing elit. + date: 2017-07-21T17:32:28Z diff --git a/openapi/spec/components/schemas/announcementTitle.yaml b/openapi/spec/components/schemas/announcementTitle.yaml new file mode 100644 index 00000000000..77293033b8c --- /dev/null +++ b/openapi/spec/components/schemas/announcementTitle.yaml @@ -0,0 +1,3 @@ +description: Announcement title. 
+type: string +maxLength: 90 diff --git a/openapi/spec/components/schemas/announcementUUID.yaml b/openapi/spec/components/schemas/announcementUUID.yaml new file mode 100644 index 00000000000..df81adb1cfd --- /dev/null +++ b/openapi/spec/components/schemas/announcementUUID.yaml @@ -0,0 +1,4 @@ +description: Announcement UUID. +type: string +format: uuid +example: 3dd0d1f8-8246-4519-b11a-a3dd33717f65 diff --git a/openapi/spec/components/schemas/apiKey.yaml b/openapi/spec/components/schemas/apiKey.yaml new file mode 100644 index 00000000000..4c46c06033c --- /dev/null +++ b/openapi/spec/components/schemas/apiKey.yaml @@ -0,0 +1,41 @@ +type: object +properties: + tenant_id: + description: The tenant ID of the namespace with which the key is associated. + type: string + format: uuid + example: 3dd0d1f8-8246-4519-b11a-a3dd33717f65 + created_by: + description: The ID of the user who created the API key. + type: string + pattern: ^[0-9a-f]{24}$ + example: 507f1f77bcf86cd799439011 + role: + type: string + description: | + The role of the key. It serves as a "level" indicating which endpoints the key can + access. + example: owner + name: + type: string + description: | + The name of the API key. This serves as an "external ID" since the UUID + will never be returned. It is unique per namespace. + example: dev + expires_in: + description: | + Epoch time until expiration. Use -1 for unlimited keys. + type: integer + minimum: -1 + example: 1707958989 + created_at: + type: string + description: The UTC date when the key was created. + format: date + example: 2020-05-01 + updated_at: + type: string + description: | + The UTC date when the key was last updated. It is updated whenever the key is modified. 
+ format: date + example: 2020-05-01 diff --git a/openapi/spec/components/schemas/apiKeyCreate.yaml b/openapi/spec/components/schemas/apiKeyCreate.yaml new file mode 100644 index 00000000000..1bfb672513c --- /dev/null +++ b/openapi/spec/components/schemas/apiKeyCreate.yaml @@ -0,0 +1,38 @@ +type: object +properties: + name: + type: string + description: | + The name of the API key. This serves as an "external ID" since the UUID + will never be returned. It is unique per namespace. + maxLength: 20 + minLength: 3 + example: dev + expires_at: + type: integer + description: Number of days until expiration. Use -1 for no expiration. + enum: + - -1 + - 30 + - 60 + - 90 + - 365 + example: 30 + role: + type: string + description: | + The role of the key. It serves as a "level" indicating which endpoints + the key can access. It must be less or equal than the user's role. Leave + it blank to use the user's role. + example: owner + key: + type: string + format: uuidv4 + description: | + An optional and unique value to be used as the API key's internal identifier. This value + is the "internal ID" and will NEVER be returned to the client. Leave it + blank for a random one to be generated. + example: c629572a-b643-4301-90fe-4572b00d007e +required: + - name + - expires_at diff --git a/openapi/spec/components/schemas/apiKeyUpdate.yaml b/openapi/spec/components/schemas/apiKeyUpdate.yaml new file mode 100644 index 00000000000..b6fc9b8690d --- /dev/null +++ b/openapi/spec/components/schemas/apiKeyUpdate.yaml @@ -0,0 +1,16 @@ +type: object +properties: + name: + type: string + description: | + The name of the API key. This serves as an "external ID" since the UUID + will never be returned. It is unique per namespace. + maxLength: 20 + minLength: 3 + example: dev + role: + type: string + description: | + The role of the key. It serves as a "level" indicating which endpoints the key can + access. It must be less or equal than the user's role. 
+ example: owner diff --git a/openapi/spec/components/schemas/apiKeyWithID.yaml b/openapi/spec/components/schemas/apiKeyWithID.yaml new file mode 100644 index 00000000000..901402547f9 --- /dev/null +++ b/openapi/spec/components/schemas/apiKeyWithID.yaml @@ -0,0 +1,48 @@ +type: object +properties: + id: + description: | + The UUID of the API key. It is used only internally and, except for the + create operation, is never returned to the client. + type: string + format: uuidv4 + example: c629572a-b643-4301-90fe-4572b00d007e + tenant_id: + description: The tenant ID of the namespace with which the key is associated. + type: string + format: uuid + example: 3dd0d1f8-8246-4519-b11a-a3dd33717f65 + created_by: + description: The ID of the user who created the API key. + type: string + pattern: ^[0-9a-f]{24}$ + example: 507f1f77bcf86cd799439011 + role: + type: string + description: | + The role of the key. It serves as a "level" indicating which endpoints the key can + access. + example: owner + name: + type: string + description: | + The name of the API key. This serves as an "external ID" since the UUID + will never be returned. It is unique per namespace. + example: dev + expires_in: + description: | + Epoch time until expiration. Use -1 for unlimited keys. + type: integer + minimum: -1 + example: 1707958989 + created_at: + type: string + description: The UTC date when the key was created. + format: date + example: 2020-05-01 + updated_at: + type: string + description: | + The UTC date when the key was last updated. It is updated whenever the key is modified. 
+ format: date + example: 2020-05-01 diff --git a/openapi/spec/components/schemas/billingError.yaml b/openapi/spec/components/schemas/billingError.yaml new file mode 100644 index 00000000000..a4d1dce290a --- /dev/null +++ b/openapi/spec/components/schemas/billingError.yaml @@ -0,0 +1,18 @@ +description: Error +content: + application/json: + schema: + oneOf: + - type: object + properties: + message: + description: Error's message. + type: string + - type: object + properties: + message: + description: Error's message. + type: string + code: + description: Error's code. + type: string diff --git a/openapi/spec/components/schemas/codes.yaml b/openapi/spec/components/schemas/codes.yaml new file mode 100644 index 00000000000..d1cb290c3c5 --- /dev/null +++ b/openapi/spec/components/schemas/codes.yaml @@ -0,0 +1,11 @@ +description: codes +type: array +items: + type: string +example: + - HW2wlxV40B + - 2xsmMUHHHb + - DTQgVsaVac + - KXPBoXvuWD + - QQYTPfotBi + - XWiKBEPyb4 diff --git a/openapi/spec/components/schemas/device.yaml b/openapi/spec/components/schemas/device.yaml new file mode 100644 index 00000000000..a3875e2654e --- /dev/null +++ b/openapi/spec/components/schemas/device.yaml @@ -0,0 +1,66 @@ +type: object +properties: + uid: + $ref: deviceUID.yaml + name: + description: | + Device's name + + + By default, the name is the device's MAC address when it just added. + type: string + example: example + identity: + $ref: deviceIdentity.yaml + info: + description: Device's info + $ref: deviceInfo.yaml + public_key: + description: Device's public key. 
+ type: string + example: '-----BEGIN RSA PUBLIC KEY-----MIIBCgKCAQEA0vH2Bob3mn+uWVaHlOoZD8ai01W6VnRTnXlnHVF7Ny1Vb7pl1Hc4D8bsBhb1vt7aZOYHbCyDR2r5lsrWXCELE8pY8vzfFDA+jNrLbBCJ66E1BcmTqfXCJcLospWD2lIAwU2O7IPxwZujuVkHrF8nYuEFsKeG60QTWNS++RTqydqe2KmFMEdWCQmYPm/ykN871fSR9+PzoRJMYWidY6Szn+X2ardGmS/Ldhl/PEu9h7xjcQXANWz6yV/RVReGVkLcK6TxlfuxgdpbsWAx+cS52P7xWrshNefHqjpdlm3KNbo6vqfTpU8Ld/FFISXXaa1Md5GyAHF+jzuRzQ5z5aKBGwIDAQAB-----END RSA PUBLIC KEY-----' + tenant_id: + $ref: namespaceTenantID.yaml + last_seen: + description: Device's last seen date + type: string + format: date-time + example: 2020-01-01T00:00:00Z + online: + description: Device's availability status + type: boolean + example: true + namespace: + description: Device's namespace + $ref: namespaceName.yaml + status: + $ref: deviceStatus.yaml + status_update_at: + $ref: deviceUpdateAt.yaml + created_at: + description: Device's creation date + type: string + format: date-time + example: 2020-01-01T00:00:00Z + remote_addr: + description: Device's remote address + type: string + example: 127.0.0.1 + position: + description: Device's geolocation position + type: object + properties: + latitude: + description: Device's latitude position + type: number + example: -31.7566628 + longitude: + description: Device's longitude position + type: number + example: -52.322474 + tags: + $ref: deviceTags.yaml + public_url: + $ref: devicePublicURL.yaml + acceptable: + $ref: deviceAcceptable.yaml diff --git a/openapi/spec/components/schemas/deviceAcceptable.yaml b/openapi/spec/components/schemas/deviceAcceptable.yaml new file mode 100644 index 00000000000..30da80738d7 --- /dev/null +++ b/openapi/spec/components/schemas/deviceAcceptable.yaml @@ -0,0 +1,9 @@ +description: | + Device's acceptable + + The value "acceptable" is based on the number of devices removed and already accepted into a namespace. All devices + are "acceptable" unless the "namespace.max_devices" is reached. 
This limit is set based on the sum up of accepted and + removed devices into the namespace. When this limit is reached, only removed devices between 720 hours or 30 days are + set to "acceptable". +type: boolean +example: false diff --git a/openapi/spec/components/schemas/deviceIdentity.yaml b/openapi/spec/components/schemas/deviceIdentity.yaml new file mode 100644 index 00000000000..9d935eda6b3 --- /dev/null +++ b/openapi/spec/components/schemas/deviceIdentity.yaml @@ -0,0 +1,8 @@ +description: Device's identity +type: object +properties: + mac: + description: Device's MAC + type: string + pattern: ^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$ + example: '00:00:00:00:00:00' diff --git a/openapi/spec/components/schemas/deviceInfo.yaml b/openapi/spec/components/schemas/deviceInfo.yaml new file mode 100644 index 00000000000..06d226b484b --- /dev/null +++ b/openapi/spec/components/schemas/deviceInfo.yaml @@ -0,0 +1,26 @@ +description: Device's info +type: object +properties: + id: + description: Device's OS name + type: string + example: example + pretty_name: + description: Device's OS pretty name + type: string + example: linux + version: + description: Device's OS version + type: string + example: latest + arch: + description: Device's OS arch + type: string + example: x86_64 + platform: + description: Device's OS platform + type: string + enum: + - docker + - native + example: docker diff --git a/openapi/spec/components/schemas/deviceName.yaml b/openapi/spec/components/schemas/deviceName.yaml new file mode 100644 index 00000000000..598bf291ad5 --- /dev/null +++ b/openapi/spec/components/schemas/deviceName.yaml @@ -0,0 +1,8 @@ +description: | + Device's name + + + By default, the name is the device's MAC address when it just + added. 
+type: string +example: example diff --git a/openapi/spec/components/schemas/deviceNamespaceName.yaml b/openapi/spec/components/schemas/deviceNamespaceName.yaml new file mode 100644 index 00000000000..6427bb07bee --- /dev/null +++ b/openapi/spec/components/schemas/deviceNamespaceName.yaml @@ -0,0 +1,3 @@ +description: Device's namespace name +type: string +example: examplespace diff --git a/openapi/spec/components/schemas/devicePublicURL.yaml b/openapi/spec/components/schemas/devicePublicURL.yaml new file mode 100644 index 00000000000..ff2e5caa419 --- /dev/null +++ b/openapi/spec/components/schemas/devicePublicURL.yaml @@ -0,0 +1,3 @@ +description: Device's public URL status. +type: boolean +example: false diff --git a/openapi/spec/components/schemas/deviceStatus.yaml b/openapi/spec/components/schemas/deviceStatus.yaml new file mode 100644 index 00000000000..866bcca8878 --- /dev/null +++ b/openapi/spec/components/schemas/deviceStatus.yaml @@ -0,0 +1,9 @@ +description: Device's status +type: string +enum: + - accepted + - rejected + - pending + - removed + - unused +example: accepted diff --git a/openapi/spec/components/schemas/deviceTags.yaml b/openapi/spec/components/schemas/deviceTags.yaml new file mode 100644 index 00000000000..59fa77f67bf --- /dev/null +++ b/openapi/spec/components/schemas/deviceTags.yaml @@ -0,0 +1,10 @@ +description: Device's Tags list +type: array +items: + $ref: ./tag.yaml +minItems: 1 +maxItems: 3 +example: + - name: tag1 + - name: tag2 + - name: tag3 diff --git a/openapi/spec/components/schemas/deviceToken.yaml b/openapi/spec/components/schemas/deviceToken.yaml new file mode 100644 index 00000000000..4c6480fef16 --- /dev/null +++ b/openapi/spec/components/schemas/deviceToken.yaml @@ -0,0 +1,4 @@ +description: Device's token +type: string +example: | + eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.iUCROHt6JHANdtzT6aOuUgOqVFRalOW20SbzRsn5SkI diff --git 
a/openapi/spec/components/schemas/deviceUID.yaml b/openapi/spec/components/schemas/deviceUID.yaml new file mode 100644 index 00000000000..c2ef88f1091 --- /dev/null +++ b/openapi/spec/components/schemas/deviceUID.yaml @@ -0,0 +1,4 @@ +description: Device's UID +type: string +pattern: ^[0-9a-f]{64}$ +example: 13b0c8ea878e61ff849db69461795006a9594c8f6a6390ce0000100b0c9d7d0a diff --git a/openapi/spec/components/schemas/deviceUpdateAt.yaml b/openapi/spec/components/schemas/deviceUpdateAt.yaml new file mode 100644 index 00000000000..0a3f34403dd --- /dev/null +++ b/openapi/spec/components/schemas/deviceUpdateAt.yaml @@ -0,0 +1,4 @@ +description: Device's status update date +type: string +format: date-time +example: 2020-05-01T00:00:00.000Z diff --git a/openapi/spec/components/schemas/firewallRulesRequest.yaml b/openapi/spec/components/schemas/firewallRulesRequest.yaml new file mode 100644 index 00000000000..6cda53f844e --- /dev/null +++ b/openapi/spec/components/schemas/firewallRulesRequest.yaml @@ -0,0 +1,55 @@ +type: object +properties: + action: + description: Firewall rule's action + type: string + enum: + - allow + - deny + example: allow + active: + description: Firewall rule active's status + type: boolean + example: true + filter: + description: Firewall rule's filter + oneOf: + - type: object + properties: + hostname: + description: Firewall rule's hostname + type: string + example: .* + required: + - hostname + - type: object + properties: + tags: + description: Firewall's rule tags + type: array + items: + $ref: tag.yaml + minItems: 1 + maxItems: 3 + required: + - tags + priority: + description: Firewall rule's priority + type: integer + minimum: 0 + example: 1 + source_ip: + description: Firewall rule's source IP regexp + type: string + example: .* + username: + description: Firewall rule's username regexp + type: string + example: .* +required: + - action + - active + - filter + - priority + - source_ip + - username diff --git 
a/openapi/spec/components/schemas/firewallRulesResponse.yaml b/openapi/spec/components/schemas/firewallRulesResponse.yaml new file mode 100644 index 00000000000..4f9ea33e5f7 --- /dev/null +++ b/openapi/spec/components/schemas/firewallRulesResponse.yaml @@ -0,0 +1,62 @@ +type: object +properties: + id: + description: Firewall rule's ID. + type: string + example: 507f1f77bcf86cd799439011 + tenant_id: + $ref: namespaceTenantID.yaml + action: + description: Firewall rule's action + type: string + enum: + - allow + - deny + example: allow + active: + description: Firewall rule active's status + type: boolean + example: true + filter: + description: Firewall rule's filter + oneOf: + - type: object + properties: + hostname: + description: Firewall rule's hostname + type: string + example: .* + required: + - hostname + - type: object + properties: + tags: + description: Firewall's rule tags + type: array + items: + $ref: tag.yaml + minItems: 1 + maxItems: 3 + required: + - tags + priority: + description: Firewall rule's priority + type: integer + minimum: 0 + example: 1 + source_ip: + description: Firewall rule's source IP regexp + type: string + example: .* + username: + description: Firewall rule's username regexp + type: string + example: .* +required: + - tenant_id + - action + - active + - filter + - priority + - source_ip + - username diff --git a/openapi/spec/components/schemas/info.yaml b/openapi/spec/components/schemas/info.yaml new file mode 100644 index 00000000000..75c29a907eb --- /dev/null +++ b/openapi/spec/components/schemas/info.yaml @@ -0,0 +1,34 @@ +type: object +properties: + version: + description: The current version of ShellHub. + type: string + example: 'v0.20.0' + endpoints: + type: object + description: Network endpoints for the ShellHub instance. + properties: + ssh: + description: The SSH endpoint where devices connect. + type: string + example: 'localhost:2222' + api: + description: The API endpoint for managing ShellHub configurations. 
+ type: string + example: 'localhost:8080' + setup: + description: Indicates whether the instance setup is complete. + type: boolean + example: true + authentication: + description: Authentication methods available for the ShellHub instance. + type: object + properties: + local: + description: Indicates if local authentication using email and password is enabled. + type: boolean + example: true + saml: + description: Indicates if SAML-based single sign-on (SSO) is enabled. + type: boolean + example: false diff --git a/openapi/spec/components/schemas/jwt.yaml b/openapi/spec/components/schemas/jwt.yaml new file mode 100644 index 00000000000..2d32ba7b5c1 --- /dev/null +++ b/openapi/spec/components/schemas/jwt.yaml @@ -0,0 +1,4 @@ +description: JWT Token +type: string +pattern: ^[A-Za-z0-9-_]*\.[A-Za-z0-9-_]*\.[A-Za-z0-9-_]*$ +example: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJleGFtcGxlIiwibmFtZSI6ImV4YW1wbGUiLCJpYXQiOjE1MTYyMzkwMjJ9.zqCt70KspnNnitZlv89hDbFZ5iGMMRUn0wFEmmlY-to diff --git a/openapi/spec/components/schemas/maxNamespaces.yaml b/openapi/spec/components/schemas/maxNamespaces.yaml new file mode 100644 index 00000000000..8a8b8a334d0 --- /dev/null +++ b/openapi/spec/components/schemas/maxNamespaces.yaml @@ -0,0 +1,3 @@ +type: number +description: Indicates the maximum number of namespaces a user is allowed to create. If set to 0, the user is not permitted to create any namespaces. If set to -1, the user has no limit on the number of namespaces they can create. 
+example: 3 diff --git a/openapi/spec/components/schemas/membershipInvitation.yaml b/openapi/spec/components/schemas/membershipInvitation.yaml new file mode 100644 index 00000000000..376170f9946 --- /dev/null +++ b/openapi/spec/components/schemas/membershipInvitation.yaml @@ -0,0 +1,65 @@ +type: object +description: A membership invitation to a namespace +properties: + namespace: + type: object + description: The namespace associated with this invitation + properties: + tenant_id: + description: The namespace tenant ID + type: string + example: "00000000-0000-4000-0000-000000000000" + name: + description: The namespace name + type: string + example: "my-namespace" + required: + - tenant_id + - name + user: + type: object + description: The invited user + properties: + id: + description: The ID of the invited user + type: string + example: "507f1f77bcf86cd799439011" + email: + description: The email of the invited user + type: string + example: "user@example.com" + required: + - id + - email + invited_by: + description: The ID of the user who sent the invitation + type: string + example: "507f1f77bcf86cd799439012" + created_at: + description: When the invitation was created + type: string + format: date-time + updated_at: + description: When the invitation was last updated + type: string + format: date-time + expires_at: + description: When the invitation expires + type: string + format: date-time + nullable: true + status: + description: The current status of the invitation + type: string + enum: + - pending + - accepted + - rejected + - cancelled + example: pending + status_updated_at: + description: When the status was last updated + type: string + format: date-time + role: + $ref: ./namespaceMemberRole.yaml diff --git a/openapi/spec/components/schemas/mfaAuth.yaml b/openapi/spec/components/schemas/mfaAuth.yaml new file mode 100644 index 00000000000..f72b99d9305 --- /dev/null +++ b/openapi/spec/components/schemas/mfaAuth.yaml @@ -0,0 +1,13 @@ +type: object 
+properties: + token: + description: The `X-MFA-Token` header returned by the authUser endpoint. + type: string + example: bf265bf8-0065-4f44-a3ac-55eb3134c6ec + code: + description: The current code from the MFA authenticator. + type: string + example: '123456' +required: + - token + - code diff --git a/openapi/spec/components/schemas/mfaDisable.yaml b/openapi/spec/components/schemas/mfaDisable.yaml new file mode 100644 index 00000000000..65e310049df --- /dev/null +++ b/openapi/spec/components/schemas/mfaDisable.yaml @@ -0,0 +1,10 @@ +type: object +properties: + code: + description: The code generated by the MFA app. + type: string + example: '123456' + recovery_code: + description: User's recovery code. + type: string + example: 6UIHAIN3CYUEFY5X diff --git a/openapi/spec/components/schemas/mfaEnable.yaml b/openapi/spec/components/schemas/mfaEnable.yaml new file mode 100644 index 00000000000..5d78be18da4 --- /dev/null +++ b/openapi/spec/components/schemas/mfaEnable.yaml @@ -0,0 +1,27 @@ +type: object +properties: + code: + description: The code generated by the MFA app. + type: string + example: '123456' + secret: + description: The secret generated by generateMFA endpoint. + type: string + example: TWBIH44WRHW44B773HJSG3RNZXH4KWSD + recovery_codes: + description: | + A list of codes generated by generateMFA endpoint. These codes can be used when a user loses their MFA app. + type: array + items: + type: string + example: + - MLNSPOB2L2ZRO2D5 + - 35QBUON4JAA4V6KP + - UYH34OY5RNDRRSUS + - IBWGL3IN42LTS3PP + - 6DIGYYGM3JM7GXC4 + - 6UIHAIN3CYUEFY5X +required: + - code + - secret + - recovery_codes diff --git a/openapi/spec/components/schemas/mfaGenerate.yaml b/openapi/spec/components/schemas/mfaGenerate.yaml new file mode 100644 index 00000000000..0998ab65815 --- /dev/null +++ b/openapi/spec/components/schemas/mfaGenerate.yaml @@ -0,0 +1,22 @@ +type: object +properties: + link: + description: The link to establish a connection with the OTP server. 
+ type: string + example: 'otpauth://totp/shellhub-enterprise:662ba312616a7bdb5a2b608d?issuer=shellhub-enterprise&secret=TWBIH44WRHW44B773HJSG3RNZXH4KWSD' + secret: + description: A secret key to authenticate with the OTP server. + type: string + example: TWBIH44WRHW44B773HJSG3RNZXH4KWSD + recovery_codes: + description: A list of recovery codes to use when the user loses access to their MFA app. + type: array + items: + type: string + example: + - MLNSPOB2L2ZRO2D5 + - 35QBUON4JAA4V6KP + - UYH34OY5RNDRRSUS + - IBWGL3IN42LTS3PP + - 6DIGYYGM3JM7GXC4 + - 6UIHAIN3CYUEFY5X diff --git a/openapi/spec/components/schemas/mfaRecover.yaml b/openapi/spec/components/schemas/mfaRecover.yaml new file mode 100644 index 00000000000..9a3e10f1e53 --- /dev/null +++ b/openapi/spec/components/schemas/mfaRecover.yaml @@ -0,0 +1,13 @@ +type: object +properties: + identifier: + description: The same as the login identifier; can be either the user's email or username. + type: string + example: john_doe + recovery_code: + description: One of the user's recovery codes. + type: string + example: '6DIGYYGM3JM7GXC4' +required: + - identifier + - recovery_code diff --git a/openapi/spec/components/schemas/mfaReset.yaml b/openapi/spec/components/schemas/mfaReset.yaml new file mode 100644 index 00000000000..07adbee7951 --- /dev/null +++ b/openapi/spec/components/schemas/mfaReset.yaml @@ -0,0 +1,13 @@ +type: object +properties: + main_email_code: + description: The code sent to the main email address. + type: string + example: JR36Q + recovery_email_code: + description: The code sent to the recovery email address. 
+ type: string + example: AB2D8 +required: + - main_email_code + - recovery_email_code diff --git a/openapi/spec/components/schemas/namespace.yaml b/openapi/spec/components/schemas/namespace.yaml new file mode 100644 index 00000000000..38fc44759bd --- /dev/null +++ b/openapi/spec/components/schemas/namespace.yaml @@ -0,0 +1,94 @@ +type: object +properties: + name: + $ref: namespaceName.yaml + owner: + $ref: userID.yaml + tenant_id: + $ref: namespaceTenantID.yaml + members: + description: Namespace's members + type: array + items: + type: object + properties: + id: + $ref: userID.yaml + added_at: + type: string + format: date-time + description: The time when the member was invited. + example: 2024-09-03T23:17:50.51Z + expires_at: + type: string + format: date-time + description: | + **NOTE: ONLY USED IN CLOUD INSTANCE.** + + The time when the invite expires. If the member is not in + `pending` status, this will be set to the zero UTC time. + example: 0001-01-01T00:00:00Z + role: + $ref: namespaceMemberRole.yaml + default: owner + type: + type: string + enum: + - personal + - team + default: personal + description: | + this field, on majority of cases is default 'personal', if the running + instance of shellhub is cloud, the default value is 'team'. + + This field requires a valid input of either 'personal' or 'team'. the default + will match the current Shellhub instance type. When a "type" field value is + specified, it will override the default, but must be either 'personal' or 'team'. + Any other input will be rejected. + + example: team + status: + type: string + enum: + - accepted + - pending + default: accepted + email: + type: string + description: Member's email. 
+ example: john.doe@test.com + settings: + $ref: namespaceSettings.yaml + max_devices: + description: Namespace's max device numbers + type: integer + minimum: 3 + default: 3 + device_count: + description: Namespace's total devices + type: integer + minimum: 0 + created_at: + description: Namespace's creation date + type: string + format: date-time + example: 2020-05-01T00:00:00.000Z + billing: + description: Namespace's billing + type: object + example: null + devices_pending_count: + description: Number of devices currently in pending status awaiting approval + type: integer + minimum: 0 + example: 2 + devices_accepted_count: + description: Number of devices that have been accepted and are active in the namespace + type: integer + minimum: 0 + example: 23 + devices_rejected_count: + description: Number of devices that have been explicitly rejected from the namespace + type: integer + minimum: 0 + example: 0 diff --git a/openapi/spec/components/schemas/namespaceMemberRole.yaml b/openapi/spec/components/schemas/namespaceMemberRole.yaml new file mode 100644 index 00000000000..5bfbfbc13e4 --- /dev/null +++ b/openapi/spec/components/schemas/namespaceMemberRole.yaml @@ -0,0 +1,7 @@ +description: Namespace's member role +type: string +enum: + - administrator + - operator + - observer + - owner diff --git a/openapi/spec/components/schemas/namespaceName.yaml b/openapi/spec/components/schemas/namespaceName.yaml new file mode 100644 index 00000000000..1c25f8bc6af --- /dev/null +++ b/openapi/spec/components/schemas/namespaceName.yaml @@ -0,0 +1,3 @@ +description: Namespace's name +type: string +example: examplespace diff --git a/openapi/spec/components/schemas/namespaceSettings.yaml b/openapi/spec/components/schemas/namespaceSettings.yaml new file mode 100644 index 00000000000..4ab003b918d --- /dev/null +++ b/openapi/spec/components/schemas/namespaceSettings.yaml @@ -0,0 +1,14 @@ +description: Namespace's settings. 
+type: object +properties: + session_record: + description: The session record setting defines whether the namespace should record sessions. This can be used to check logged activity when connecting to a device. + type: boolean + example: true + connection_announcement: + description: A connection announcement is a custom string written during a session when a connection is established on a device within the namespace. + type: string + minLength: 0 + maxLength: 4096 + format: alphanumunicode + example: my awesome connection announcement diff --git a/openapi/spec/components/schemas/namespaceTenantID.yaml b/openapi/spec/components/schemas/namespaceTenantID.yaml new file mode 100644 index 00000000000..65c895b3203 --- /dev/null +++ b/openapi/spec/components/schemas/namespaceTenantID.yaml @@ -0,0 +1,4 @@ +description: Namespace's tenant ID +type: string +format: uuid +example: 3dd0d1f8-8246-4519-b11a-a3dd33717f65 diff --git a/openapi/spec/components/schemas/publicKeyData.yaml b/openapi/spec/components/schemas/publicKeyData.yaml new file mode 100644 index 00000000000..68889482417 --- /dev/null +++ b/openapi/spec/components/schemas/publicKeyData.yaml @@ -0,0 +1,7 @@ +description: | + Public key's data. + + The `data` field receives the public key encoded as a `base64` string. 
+type: string +pattern: ^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$ +example: c3NoLXJzYSBBQUFBQjNOemFDMXljMkVBQUFBREFRQUJBQUFCQVFDWWdqRkNQUWdPejBEZ0VQQUh3blEyMGYzRUlGYjd2SkNtd1YxR25uRTU2K0htaGgyY295c3o5MnZqMW9GeElxQUlKZUZxU3lQNWwzbDZjbkFUVmxhZ2MxR21OQm5vQ0NZSlpicXdOVUFiM3RMTXdiOXBaSGVWMFczWVl4OERBSVVsL2ZYaVVhQTNpQk5BcTFrczFzYjZjbVN1VmYwTVNTSjdoOXU3c2Y2RnkyVmQ0U1FqSGd3YmNvSUY1Q0kyWkZlMEx6NTNWeGQwVlZRZG5ISGNBeldRVFlTMDIxcmVXeG5QR2RRdytmWXpCRWdRMG5sTmFzQXBRc1pVUXRPZ0t4TlNFcVJ0VnJiRUR4WisrTllQaWFuNUdSZ0huZWNUUzBaVGNjZjM4SDZYTms1Qm5XWGlEN2RCWlJBRnZ1UjBkWEF1cU9mYUM3SVl5MVJnS1lkdEsrUnY= diff --git a/openapi/spec/components/schemas/publicKeyFilter.yaml b/openapi/spec/components/schemas/publicKeyFilter.yaml new file mode 100644 index 00000000000..dc5763c8742 --- /dev/null +++ b/openapi/spec/components/schemas/publicKeyFilter.yaml @@ -0,0 +1,29 @@ +description: | + Public key's filter rule. + + + The `filter` rule defines whether the public key is valid for a device. + + - When the `hostname` object is set, the public key will be used on a device that matches the hostname. + - When the `tags` object is set, it matches a device that contains at least one of those tags. +oneOf: + - type: object + properties: + hostname: + description: Public key's regex hostname. + type: string + example: .* + required: + - hostname + - type: object + properties: + tags: + description: Public key's tags. + type: array + items: + $ref: tag.yaml + minItems: 1 + maxItems: 3 + uniqueItems: true + required: + - tags diff --git a/openapi/spec/components/schemas/publicKeyRequest.yaml b/openapi/spec/components/schemas/publicKeyRequest.yaml new file mode 100644 index 00000000000..b5364ae7cd4 --- /dev/null +++ b/openapi/spec/components/schemas/publicKeyRequest.yaml @@ -0,0 +1,17 @@ +type: object +properties: + data: + $ref: publicKeyData.yaml + filter: + $ref: publicKeyFilter.yaml + name: + description: Public key's name. 
+ type: string + example: example + username: + $ref: publicKeyUsername.yaml +required: + - data + - filter + - name + - username diff --git a/openapi/spec/components/schemas/publicKeyResponse.yaml b/openapi/spec/components/schemas/publicKeyResponse.yaml new file mode 100644 index 00000000000..d967f474ef3 --- /dev/null +++ b/openapi/spec/components/schemas/publicKeyResponse.yaml @@ -0,0 +1,21 @@ +type: object +properties: + data: + $ref: publicKeyData.yaml + fingerprint: + $ref: publickKeyFingerprint.yaml + created_at: + description: Public key's creation date. + type: string + format: date-time + example: 2020-05-01T00:00:00.000Z + tenant_id: + $ref: namespaceTenantID.yaml + name: + description: Public key's name. + type: string + example: example + filter: + $ref: publicKeyFilter.yaml + username: + $ref: publicKeyUsername.yaml diff --git a/openapi/spec/components/schemas/publicKeyUsername.yaml b/openapi/spec/components/schemas/publicKeyUsername.yaml new file mode 100644 index 00000000000..0bfea8bfdec --- /dev/null +++ b/openapi/spec/components/schemas/publicKeyUsername.yaml @@ -0,0 +1,7 @@ +description: | + Public key's regex username. + + + The `username` field defines which user on the device may be accessed through this public key. +type: string +example: '.*' diff --git a/openapi/spec/components/schemas/publickKeyFingerprint.yaml b/openapi/spec/components/schemas/publickKeyFingerprint.yaml new file mode 100644 index 00000000000..ad73a3e40fa --- /dev/null +++ b/openapi/spec/components/schemas/publickKeyFingerprint.yaml @@ -0,0 +1,4 @@ +description: Public key's fingerprint. 
+type: string +pattern: ^([0-9a-f]{2}:){15}[0-9a-f]{2}$ +example: 48:6e:fc:94:01:01:74:57:eb:57:49:91:15:e4:9c:7a diff --git a/openapi/spec/components/schemas/query.yaml b/openapi/spec/components/schemas/query.yaml new file mode 100644 index 00000000000..6b1dc243f3c --- /dev/null +++ b/openapi/spec/components/schemas/query.yaml @@ -0,0 +1,8 @@ +type: object +properties: + page: + description: Device list page + type: integer + per_page: + description: Device number per page + type: integer diff --git a/openapi/spec/components/schemas/recordedSessionResponse.yaml b/openapi/spec/components/schemas/recordedSessionResponse.yaml new file mode 100644 index 00000000000..c82f6a787d0 --- /dev/null +++ b/openapi/spec/components/schemas/recordedSessionResponse.yaml @@ -0,0 +1,42 @@ +type: array +items: + type: object + properties: + uid: + description: Session's ID + type: string + minLength: 64 + maxLength: 64 + example: 50d858e0985ecc7f60418aaf0cc5ab587f42c2570a884095a9e8ccacd0f6545c + seat: + description: Session's Seat + type: integer + minimum: 0 + message: + description: Session's Data + type: string + tenant_id: + $ref: namespaceTenantID.yaml + time: + description: Session's time + type: string + format: date-time + example: 2020-05-01T00:00:00.000Z + width: + description: Session's terminal width + type: integer + minimum: 0 + example: 24 + height: + description: Session's terminal height + type: integer + minimum: 0 + example: 111 + required: + - uid + - seat + - message + - tenant_id + - time + - width + - height diff --git a/openapi/spec/components/schemas/security.yaml b/openapi/spec/components/schemas/security.yaml new file mode 100644 index 00000000000..856d3f090de --- /dev/null +++ b/openapi/spec/components/schemas/security.yaml @@ -0,0 +1,12 @@ +jwt: + type: http + scheme: bearer + bearerFormat: JWT + description: JSON Web Token for authentication. 
+api-key: + type: apiKey + in: header + name: X-API-KEY + description: | + An API key is an alternative to the standard JWT authentication. + Authentication with this method is namespace-related and is not tied to any user. diff --git a/openapi/spec/components/schemas/session.yaml b/openapi/spec/components/schemas/session.yaml new file mode 100644 index 00000000000..e9d40092f27 --- /dev/null +++ b/openapi/spec/components/schemas/session.yaml @@ -0,0 +1,99 @@ +# Session object containing information about an active or completed session +type: object +properties: + uid: + $ref: sessionUID.yaml + device_uid: + $ref: deviceUID.yaml + device: + $ref: device.yaml + tenant_id: + $ref: namespaceTenantID.yaml + username: + description: Session's username + type: string + ip_address: + description: Session's IP address + type: string + pattern: ^[0-9]{1,3}(\.[0-9]{1,3}){3}$ + example: 127.0.0.1 + started_at: + description: Session's started date + type: string + example: 2020-01-01T00:00:00Z + last_seen: + description: Session's last seen date + type: string + example: 2020-01-01T00:00:00Z + active: + description: Session's active status + type: boolean + authenticated: + description: Session's authenticated status + type: boolean + recorded: + description: Session's recorded status + type: boolean + type: + description: Session's type + type: string + enum: + - web + - term + example: web + term: + description: Session's terminal + type: string + example: xterm.js + position: + description: Session's geolocation position + type: object + properties: + latitude: + description: Session's latitude position + type: number + example: -31.7566628 + longitude: + description: Session's longitude position + type: number + example: -52.322474 + events: + description: Session's events + type: object + properties: + types: + description: Session's set of types + type: array + items: + type: string + example: shell + seats: + description: Session's seats + type: array + items: + type: 
integer + minimum: 0 + example: + - 0 + items: + description: Session's list of events + type: array + items: + type: object + properties: + type: + description: The type of the event + type: string + example: 'shell' + timestamp: + description: The time the event occurred in ISO 8601 format + type: string + format: date-time + example: '2023-10-01T12:00:00Z' + data: + description: Additional data related to the event + type: object + seat: + description: Seat where the event happened + type: integer + minimum: 0 diff --git a/openapi/spec/components/schemas/sessionUID.yaml b/openapi/spec/components/schemas/sessionUID.yaml new file mode 100644 index 00000000000..326dd0c5f87 --- /dev/null +++ b/openapi/spec/components/schemas/sessionUID.yaml @@ -0,0 +1,4 @@ +description: Session's UID +type: string +pattern: ^[0-9a-f]{64}$ +example: 13b0c8ea878e61ff849db69461795006a9594c8f6a6390ce0000100b0c9d7d0a diff --git a/openapi/spec/components/schemas/support.yaml b/openapi/spec/components/schemas/support.yaml new file mode 100644 index 00000000000..7f39dafd99c --- /dev/null +++ b/openapi/spec/components/schemas/support.yaml @@ -0,0 +1,7 @@ +type: object +properties: + identifier: + description: Support identifier. + type: string + example: '1a1e64452ebba63e39983aa05ea176a31072241af66da72559a539af91d00f52' + pattern: '^[a-fA-F0-9]{64}$' diff --git a/openapi/spec/components/schemas/tag.yaml b/openapi/spec/components/schemas/tag.yaml new file mode 100644 index 00000000000..29ccd009a1d --- /dev/null +++ b/openapi/spec/components/schemas/tag.yaml @@ -0,0 +1,10 @@ +description: | + A tag represents a label or category that can be attached to devices, + firewall rules and public keys for organization and filtering purposes. 
+type: object +properties: + name: + type: string + description: The display name of the tag + minLength: 3 + maxLength: 255 diff --git a/openapi/spec/components/schemas/tunnel.yaml b/openapi/spec/components/schemas/tunnel.yaml new file mode 100644 index 00000000000..179038d8039 --- /dev/null +++ b/openapi/spec/components/schemas/tunnel.yaml @@ -0,0 +1,28 @@ +type: object +properties: + address: + $ref: tunnelAddress.yaml + full_address: + description: Full tunnel address including domain + type: string + example: '9a8df9321368d567cfac8679cec7848c.localhost' + namespace: + $ref: namespaceTenantID.yaml + device: + $ref: deviceUID.yaml + host: + $ref: tunnelHost.yaml + port: + $ref: tunnelPort.yaml + ttl: + $ref: tunnelTTL.yaml + expires_in: + description: Tunnel's expiration date + type: string + format: date-time + example: 2020-05-01T00:01:00.000Z + created_at: + description: Tunnel's creation date + type: string + format: date-time + example: 2020-05-01T00:00:00.000Z diff --git a/openapi/spec/components/schemas/tunnelAddress.yaml b/openapi/spec/components/schemas/tunnelAddress.yaml new file mode 100644 index 00000000000..5f35067fcdc --- /dev/null +++ b/openapi/spec/components/schemas/tunnelAddress.yaml @@ -0,0 +1,4 @@ +description: Tunnel's unique address +type: string +pattern: '^[a-f0-9]{32}$' +example: '9a8df9321368d567cfac8679cec7848c' diff --git a/openapi/spec/components/schemas/tunnelFullAddress.yaml b/openapi/spec/components/schemas/tunnelFullAddress.yaml new file mode 100644 index 00000000000..5896f4af4d0 --- /dev/null +++ b/openapi/spec/components/schemas/tunnelFullAddress.yaml @@ -0,0 +1,4 @@ +description: Tunnel's full address +type: string +pattern: "^[a-f0-9]{32}\\.[a-zA-Z0-9-]+(?:\\.[a-zA-Z0-9-]+)*$" +example: '9a8df9321368d567cfac8679cec7848c.example.com' diff --git a/openapi/spec/components/schemas/tunnelHost.yaml b/openapi/spec/components/schemas/tunnelHost.yaml new file mode 100644 index 00000000000..567245543ae --- /dev/null +++ 
b/openapi/spec/components/schemas/tunnelHost.yaml @@ -0,0 +1,4 @@ +description: Tunnel's agent host address +type: string +format: ip +example: 127.0.0.1 diff --git a/openapi/spec/components/schemas/tunnelPort.yaml b/openapi/spec/components/schemas/tunnelPort.yaml new file mode 100644 index 00000000000..9afad5556a9 --- /dev/null +++ b/openapi/spec/components/schemas/tunnelPort.yaml @@ -0,0 +1,6 @@ +description: Tunnel's agent port number +type: integer +format: int32 +minimum: 1 +maximum: 65535 +example: 8080 diff --git a/openapi/spec/components/schemas/tunnelTTL.yaml b/openapi/spec/components/schemas/tunnelTTL.yaml new file mode 100644 index 00000000000..f9237700a80 --- /dev/null +++ b/openapi/spec/components/schemas/tunnelTTL.yaml @@ -0,0 +1,5 @@ +description: Tunnel's time to live in seconds +type: integer +minimum: -1 +maximum: 9223372036 +example: 60 diff --git a/openapi/spec/components/schemas/user.yaml b/openapi/spec/components/schemas/user.yaml new file mode 100644 index 00000000000..df19f8e4be9 --- /dev/null +++ b/openapi/spec/components/schemas/user.yaml @@ -0,0 +1,17 @@ +type: object +properties: + name: + $ref: userName.yaml + email: + $ref: userEmail.yaml + username: + $ref: userUsername.yaml + password: + $ref: userPassword.yaml + email_marketing: + $ref: userMarketing.yaml +required: + - name + - email + - username + - password diff --git a/openapi/spec/components/schemas/userAdminRequest.yaml b/openapi/spec/components/schemas/userAdminRequest.yaml new file mode 100644 index 00000000000..7908578e44e --- /dev/null +++ b/openapi/spec/components/schemas/userAdminRequest.yaml @@ -0,0 +1,25 @@ +type: object +properties: + name: + $ref: userName.yaml + email: + $ref: userEmail.yaml + username: + $ref: userUsername.yaml + password: + description: User's password. + type: string + maxLength: 30 + example: example + confirmed: + $ref: userConfirmedEmail.yaml + admin: + description: User's admin status. 
+ type: boolean + max_namespaces: + $ref: maxNamespaces.yaml +required: + - name + - email + - username + - password diff --git a/openapi/spec/components/schemas/userAdminResponse.yaml b/openapi/spec/components/schemas/userAdminResponse.yaml new file mode 100644 index 00000000000..ea520cb680f --- /dev/null +++ b/openapi/spec/components/schemas/userAdminResponse.yaml @@ -0,0 +1,33 @@ +type: object +properties: + id: + $ref: userID.yaml + namespaces: + description: Number of namespaces owned by the user. + type: integer + minimum: 0 + confirmed: + description: User's confirmation. + type: boolean + admin: + description: User's admin status. + type: boolean + created_at: + description: User's creation date. + type: string + format: date-time + last_login: + description: User's last login date. + type: string + format: date-time + name: + $ref: userName.yaml + email: + $ref: userEmail.yaml + username: + $ref: userUsername.yaml + password: + description: User's hashed password. + type: string + minLength: 64 + maxLength: 64 diff --git a/openapi/spec/components/schemas/userAuth.yaml b/openapi/spec/components/schemas/userAuth.yaml new file mode 100644 index 00000000000..776f040c761 --- /dev/null +++ b/openapi/spec/components/schemas/userAuth.yaml @@ -0,0 +1,34 @@ +type: object +properties: + token: + $ref: jwt.yaml + id: + $ref: userID.yaml + origin: + $ref: userOrigin.yaml + user: + $ref: userUsername.yaml + name: + $ref: userName.yaml + email: + $ref: userEmail.yaml + recovery_email: + description: | + The recovery email serves as the user's final recourse to regain access + to their account. + type: string + format: email + tenant: + $ref: namespaceTenantID.yaml + role: + $ref: namespaceMemberRole.yaml + mfa: + type: boolean + description: Indicates whether the user has MFA enabled. + example: false + admin: + type: boolean + description: Indicates whether the user has admin status. 
+ example: false + max_namespaces: + $ref: maxNamespaces.yaml diff --git a/openapi/spec/components/schemas/userConfirmedEmail.yaml b/openapi/spec/components/schemas/userConfirmedEmail.yaml new file mode 100644 index 00000000000..791d88f9bbd --- /dev/null +++ b/openapi/spec/components/schemas/userConfirmedEmail.yaml @@ -0,0 +1,4 @@ +description: User's email confirmed. +type: boolean +example: true +default: false diff --git a/openapi/spec/components/schemas/userEmail.yaml b/openapi/spec/components/schemas/userEmail.yaml new file mode 100644 index 00000000000..2e2b2cafbb7 --- /dev/null +++ b/openapi/spec/components/schemas/userEmail.yaml @@ -0,0 +1,4 @@ +description: User's E-mail. +type: string +format: email +example: example@example.com diff --git a/openapi/spec/components/schemas/userID.yaml b/openapi/spec/components/schemas/userID.yaml new file mode 100644 index 00000000000..4b1e68ebaf2 --- /dev/null +++ b/openapi/spec/components/schemas/userID.yaml @@ -0,0 +1,4 @@ +description: User's ID. +type: string +pattern: ^[0-9a-f]{24}$ +example: 507f1f77bcf86cd799439011 diff --git a/openapi/spec/components/schemas/userMarketing.yaml b/openapi/spec/components/schemas/userMarketing.yaml new file mode 100644 index 00000000000..b459e4a54f1 --- /dev/null +++ b/openapi/spec/components/schemas/userMarketing.yaml @@ -0,0 +1,3 @@ +description: User's email marketing option. +type: 'boolean' +example: true diff --git a/openapi/spec/components/schemas/userName.yaml b/openapi/spec/components/schemas/userName.yaml new file mode 100644 index 00000000000..d9b70f340f1 --- /dev/null +++ b/openapi/spec/components/schemas/userName.yaml @@ -0,0 +1,5 @@ +description: User's name. 
+type: string +minLength: 3 +maxLength: 20 +example: example diff --git a/openapi/spec/components/schemas/userOrigin.yaml b/openapi/spec/components/schemas/userOrigin.yaml new file mode 100644 index 00000000000..4678ea61224 --- /dev/null +++ b/openapi/spec/components/schemas/userOrigin.yaml @@ -0,0 +1,3 @@ +description: Specifies the method the user employed to register with ShellHub. +type: string +enum: [local] diff --git a/openapi/spec/components/schemas/userPassword.yaml b/openapi/spec/components/schemas/userPassword.yaml new file mode 100644 index 00000000000..a5603f0e685 --- /dev/null +++ b/openapi/spec/components/schemas/userPassword.yaml @@ -0,0 +1,5 @@ +description: User's password. +type: string +minLength: 5 +maxLength: 30 +example: example diff --git a/openapi/spec/components/schemas/userUsername.yaml b/openapi/spec/components/schemas/userUsername.yaml new file mode 100644 index 00000000000..521e0459501 --- /dev/null +++ b/openapi/spec/components/schemas/userUsername.yaml @@ -0,0 +1,6 @@ +description: User's username. 
+type: string +minLength: 3 +maxLength: 30 +pattern: ^[a-zA-Z0-9-_.@]{3,30}$ +example: example diff --git a/openapi/spec/components/schemas/webendpoint.yaml b/openapi/spec/components/schemas/webendpoint.yaml new file mode 100644 index 00000000000..fc8e3e56109 --- /dev/null +++ b/openapi/spec/components/schemas/webendpoint.yaml @@ -0,0 +1,32 @@ +type: object +properties: + address: + $ref: webendpointAddress.yaml + full_address: + description: Full webendpoint address including domain + type: string + example: '9a8df9321368d567cfac8679cec7848c.localhost' + namespace: + $ref: namespaceTenantID.yaml + device_uid: + $ref: deviceUID.yaml + device: + $ref: device.yaml + host: + $ref: webendpointHost.yaml + port: + $ref: webendpointPort.yaml + ttl: + $ref: webendpointTTL.yaml + tls: + $ref: webendpointTLS.yaml + expires_in: + description: Web endpoint's expiration date + type: string + format: date-time + example: 2020-05-01T00:01:00.000Z + created_at: + description: Web endpoint's creation date + type: string + format: date-time + example: 2020-05-01T00:00:00.000Z diff --git a/openapi/spec/components/schemas/webendpointAddress.yaml b/openapi/spec/components/schemas/webendpointAddress.yaml new file mode 100644 index 00000000000..15c3b30adc4 --- /dev/null +++ b/openapi/spec/components/schemas/webendpointAddress.yaml @@ -0,0 +1,4 @@ +description: Web endpoint's unique address +type: string +pattern: '^[a-f0-9]{32}$' +example: '9a8df9321368d567cfac8679cec7848c' diff --git a/openapi/spec/components/schemas/webendpointFullAddress.yaml b/openapi/spec/components/schemas/webendpointFullAddress.yaml new file mode 100644 index 00000000000..173f6e702d8 --- /dev/null +++ b/openapi/spec/components/schemas/webendpointFullAddress.yaml @@ -0,0 +1,4 @@ +description: Web endpoint's full address +type: string +pattern: "^[a-f0-9]{32}\\.[a-zA-Z0-9-]+(?:\\.[a-zA-Z0-9-]+)*$" +example: '9a8df9321368d567cfac8679cec7848c.example.com' diff --git 
a/openapi/spec/components/schemas/webendpointHost.yaml b/openapi/spec/components/schemas/webendpointHost.yaml new file mode 100644 index 00000000000..164d2a30f2d --- /dev/null +++ b/openapi/spec/components/schemas/webendpointHost.yaml @@ -0,0 +1,4 @@ +description: Web endpoint's agent host address +type: string +format: ip +example: 127.0.0.1 diff --git a/openapi/spec/components/schemas/webendpointPort.yaml b/openapi/spec/components/schemas/webendpointPort.yaml new file mode 100644 index 00000000000..874c69b6cd9 --- /dev/null +++ b/openapi/spec/components/schemas/webendpointPort.yaml @@ -0,0 +1,6 @@ +description: Web endpoint's agent port number +type: integer +format: int32 +minimum: 1 +maximum: 65535 +example: 8080 diff --git a/openapi/spec/components/schemas/webendpointTLS.yaml b/openapi/spec/components/schemas/webendpointTLS.yaml new file mode 100644 index 00000000000..c55108c04af --- /dev/null +++ b/openapi/spec/components/schemas/webendpointTLS.yaml @@ -0,0 +1,19 @@ +type: object +description: Web endpoint TLS configuration +properties: + enabled: + type: boolean + description: Whether TLS is enabled for this web endpoint + example: false + verify: + type: boolean + description: Whether to verify the TLS certificate + example: false + domain: + type: string + description: Domain for TLS verification + example: "example.com" +required: + - enabled + - verify + - domain diff --git a/openapi/spec/components/schemas/webendpointTTL.yaml b/openapi/spec/components/schemas/webendpointTTL.yaml new file mode 100644 index 00000000000..1af9946566f --- /dev/null +++ b/openapi/spec/components/schemas/webendpointTTL.yaml @@ -0,0 +1,5 @@ +description: Web endpoint's time to live in seconds +type: integer +minimum: -1 +maximum: 9223372036 +example: 60 diff --git a/openapi/spec/enterprise-openapi.yaml b/openapi/spec/enterprise-openapi.yaml new file mode 100644 index 00000000000..33d39133f4e --- /dev/null +++ b/openapi/spec/enterprise-openapi.yaml @@ -0,0 +1,113 @@ +openapi: 
3.0.3 +info: + title: ShellHub Enterprise OpenAPI + description: | + > NOTICE: THE API IS NOT STABLE YET; ERRORS AND INCONSISTENCIES MAY OCCUR. + + ShellHub Enterprise OpenAPI specification. + + It documents all routes provided by ShellHub Enterprise. + contact: + email: contato@ossystems.com.br + license: + name: Apache License 2.0 + url: https://github.com/shellhub-io/shellhub/blob/master/LICENSE.md + version: 0.20.0 +servers: + - url: / + description: ShellHub server as an Enterprise instance. +tags: + - name: cloud + description: Routes provided by ShellHub Cloud API. + - name: authentication-settings + description: | + Defines the routes to configure and retrieve the ShellHub authentication + settings. Currently, ShellHub supports both local authentication (email + and password) and SAML-based authentication (Single Sign-On). + - name: admin + description: Routes provided by ShellHub Admin API. + - name: users + description: Routes related to user resource. + - name: stats + description: Routes related to stats resource. + - name: license + description: Routes related to license resource. + - name: sessions + description: Routes related to session resource. + - name: users + description: Routes related to users resource. + - name: rules + description: Routes related to firewall rules resource. + - name: api-keys + description: | + An API key is a unique identifier used to access protected endpoints. It + has a defined lifespan, is associated with a namespace, and cannot be + used to authenticate user routes. Typically, it replaces login-based + authentication when automating processes. + + To utilize an API key, it must be included in the `X-API-KEY` header. API + keys are preferred over JWT tokens and will be used even if one is + provided. + + Except for `GET` endpoints, API key-related routes cannot be + authenticated with an API key. + - name: tunnels + description: Routes related to tunnels resource. 
+ - name: web-endpoints + description: Routes related to web-endpoints resource. + +components: + securitySchemes: + $ref: ./components/schemas/security.yaml +paths: + $ref: cloud-openapi.yaml#/paths + /admin/api/authentication: + $ref: paths/admin@api@authentication.yaml + /admin/api/authentication/local: + $ref: paths/admin@api@authentication@local.yaml + /admin/api/authentication/saml: + $ref: paths/admin@api@authentication@saml.yaml + /admin/api/devices: + $ref: paths/admin@api@devices.yaml + /admin/api/devices/{uid}: + $ref: paths/admin@api@devices@{uid}.yaml + /admin/api/devices/{uid}/{status}: + $ref: paths/admin@api@devices@{uid}@{status}.yaml + /admin/api/firewall/rules: + $ref: paths/admin@api@firewall@rules.yaml + /admin/api/firewall/rules/{id}: + $ref: paths/admin@api@firewall@rules@{id}.yaml + /admin/api/sessions: + $ref: paths/admin@api@sessions.yaml + /admin/api/sessions/{uid}: + $ref: paths/admin@api@sessions@{uid}.yaml + /admin/api/sshkeys/public-keys: + $ref: paths/admin@api@sshkeys@public-keys.yaml + /admin/api/namespaces: + $ref: paths/admin@api@namespaces.yaml + /admin/api/namespaces/{tenant}: + $ref: paths/admin@api@namespaces@{tenant}.yaml + /admin/api/namespaces-update/{tenantID}: + $ref: paths/admin@api@namespaces-update@{tenantID}.yaml + /admin/api/login: + $ref: paths/admin@api@login.yaml + /admin/api/users: + $ref: paths/admin@api@users.yaml + /admin/api/users/{id}: + $ref: paths/admin@api@users@{id}.yaml + /admin/api/users/{id}/password/reset: + $ref: paths/admin@api@users@{id}@password@reset.yaml + /admin/api/stats: + $ref: paths/admin@api@stats.yaml + /admin/api/license: + $ref: paths/admin@api@license.yaml + /admin/api/auth/token/{id}: + $ref: paths/admin@api@auth@token@{id}.yaml + /admin/api/export/namespaces: + $ref: paths/admin@api@export@namespaces.yaml + /admin/api/export/users: + $ref: paths/admin@api@export@users.yaml + /admin/api/announcements: + $ref: paths/admin@api@announcements.yaml + /admin/api/announcements/{uuid}: + 
$ref: paths/admin@api@announcements@{uuid}.yaml diff --git a/openapi/spec/openapi.yaml b/openapi/spec/openapi.yaml new file mode 100644 index 00000000000..e450e88f66c --- /dev/null +++ b/openapi/spec/openapi.yaml @@ -0,0 +1,309 @@ +openapi: 3.0.3 +info: + title: ShellHub OpenAPI + description: | + > THE API IS NOT STABLE YET; ERRORS AND INCONSISTENCIES MAY OCCUR. + + This is the OpenAPI specification for ShellHub community version. It documents the parameters and bodies for + performing HTTP requests to the ShellHub server endpoints related to users, namespaces, members, devices, tags, SSH, + sessions, etc. + + These endpoints require a JSON Web Token (JWT) as its security scheme, that means you need to send, to almost each + request, an HTTP header called `Authorization` with the `bearer` token. To obtain this token, use the `/api/login` + route, fulfilling its request body to return that token with some essential information about the user who logged + in. + contact: + name: ShellHub contact address. + email: contato@ossystems.com.br + license: + name: Apache License 2.0 + url: https://github.com/shellhub-io/shellhub/blob/master/LICENSE.md + version: 0.20.0 +servers: + - url: / + description: ShellHub server. +components: + securitySchemes: + jwt: + type: http + scheme: bearer + bearerFormat: JWT + description: JSON Web Token for authentication. + api-key: + type: apiKey + in: header + name: X-API-KEY + description: | + An API key is an alternative to the standard JWT authentication. + Authentication with this method is namespace-related and is not tied to any user. +tags: + - name: internal + description: Requests executed internally by ShellHub server. + - name: external + description: Requests executed by the ShellHub user. + - name: community + description: Routes provided by ShellHub Community API. + - name: enterprise + description: Routes provided by ShellHub Enterprise API. + - name: cloud + description: Routes provided by ShellHub Cloud API. 
+ - name: users + description: Routes related to user resource. + - name: devices + description: Routes related to device resource. + - name: containers + description: Routes related to containers resource. + - name: ssh + description: Routes related to SSH resource. + - name: api-keys + description: | + An API key is a unique identifier used to access protected endpoints. It + has a defined lifespan, is associated with a namespace, and cannot be + used to authenticate user routes. Typically, it replaces login-based + authentication when automating processes. + + To utilize an API key, it must be included in the `X-API-KEY` header. API + keys are preferred over JWT tokens and will be used even if one is + provided. + + Except for `GET` endpoints, API key-related routes cannot be + authenticated with an API key. + - name: system + description: Routes related to running instance. + - name: sessions + description: Routes related to session resource. + - name: authentication-settings + description: | + Defines the routes to configure and retrieve the ShellHub authentication + settings. Currently, ShellHub supports both local authentication (email + and password) and SAML-based authentication (Single Sign-On). + - name: admin + description: Routes provided by ShellHub Admin API. + - name: stats + description: Routes related to stats resource. + - name: license + description: Routes related to license resource. + - name: rules + description: Routes related to firewall rules resource. + - name: tunnels + description: Routes related to tunnels resource. + - name: web-endpoints + description: Routes related to web-endpoints resource. 
+ - name: announcements + description: Routes related to announcements resource + - name: mfa + description: Routes related to MFA + +paths: + /info: + $ref: paths/api@info.yaml + /api/login: + $ref: paths/api@login.yaml + /api/auth/user: + $ref: paths/api@auth@user.yaml + /api/auth/ssh: + $ref: paths/api@auth@ssh.yaml + /api/auth/token/{tenant}: + $ref: paths/api@auth@token@{tenant}.yaml + /api/token/{tenant}: + $ref: paths/api@token@{tenant}.yaml + /api/devices/{uid}/accept: + $ref: paths/api@devices@{uid}@accept.yaml + /api/users: + $ref: paths/api@users.yaml + /api/users/{id}/data: + $ref: paths/api@users@{id}@data.yaml + /api/users/{id}/password: + $ref: paths/api@users@{id}@password.yaml + /api/users/security/{tenant}: + $ref: paths/api@users@security@{tenant}.yaml + /api/users/security: + $ref: paths/api@users@security.yaml + /api/devices: + $ref: paths/api@devices.yaml + /api/devices/{uid}: + $ref: paths/api@devices@{uid}.yaml + /api/devices/resolve: + $ref: paths/api@devices@resolve.yaml + /api/devices/{uid}/{status}: + $ref: paths/api@devices@{uid}@{status}.yaml + /internal/devices/{uid}/offline: + $ref: paths/api@devices@{uid}@offline.yaml + /api/sessions: + $ref: paths/api@sessions.yaml + /api/sessions/{uid}: + $ref: paths/api@sessions@{uid}.yaml + /api/sshkeys/public-keys: + $ref: paths/api@sshkeys@public-keys.yaml + /api/sshkeys/public-keys/{fingerprint}: + $ref: paths/api@sshkeys@public-keys@{fingerprint}.yaml + /api/stats: + $ref: paths/api@stats.yaml + /api/namespaces: + $ref: paths/api@namespaces.yaml + /api/namespaces/{tenant}: + $ref: paths/api@namespaces@{tenant}.yaml + /api/namespaces/{tenant}/members: + $ref: paths/api@namespaces@{tenant}@members.yaml + /api/namespaces/{tenant}/members/{uid}: + $ref: paths/api@namespaces@{tenant}@members@{uid}.yaml + /api/namespaces/api-key: + $ref: paths/api@namespaces@api-key.yaml + /api/namespaces/api-key/{key}: + $ref: paths/api@namespaces@api-key@{key}.yaml + /api/tags: + $ref: paths/api@tags.yaml + 
/api/tags/{name}: + $ref: paths/api@tags@{name}.yaml + /api/devices/{uid}/tags/{name}: + $ref: paths/api@devices@{uid}@tags@{name}.yaml + /api/namespaces/{tenant}/tags: + $ref: paths/api@namespaces@{tenant}@tags.yaml + /api/namespaces/{tenant}/tags/{name}: + $ref: paths/api@namespaces@{tenant}@tags@{name}.yaml + /api/namespaces/{tenant}/devices/{uid}/tags/{name}: + $ref: paths/api@namespaces@{tenant}@devices@{uid}@tags@{name}.yaml + /api/namespaces/{tenant}/containers/{uid}/tags/{name}: + $ref: paths/api@namespaces@{tenant}@containers@{uid}@tags@{name}.yaml + /api/announcements: + $ref: paths/api@announcements.yaml + /api/announcements/{uuid}: + $ref: paths/api@announcements@{uuid}.yaml + /api/containers: + $ref: paths/api@containers.yaml + /api/containers/{uid}: + $ref: paths/api@containers@{uid}.yaml + /api/containers/{uid}/{status}: + $ref: paths/api@containers@{uid}@{status}.yaml + /api/setup: + $ref: paths/api@setup.yaml + /api/sessions/{uid}/close: + $ref: paths/api@sessions@{uid}@close.yaml + /api/sessions/{uid}/records/{seat}: + $ref: paths/api@sessions@{uid}@records@{seat}.yaml + /api/firewall/rules: + $ref: paths/api@firewall@rules.yaml + /api/firewall/rules/{id}: + $ref: paths/api@firewall@rules@{id}.yaml + /api/register: + $ref: paths/api@register.yaml + /api/user/resend_email: + $ref: paths/api@user@resend_email.yaml + /api/user/{uid}/update_password: + $ref: paths/api@user@{uid}@update_password.yaml + /api/user/validation_account: + $ref: paths/api@user@validation_account.yaml + /api/user/recover_password: + $ref: paths/api@user@recover_password.yaml + /api/billing/customer: + $ref: paths/api@billing@customer.yaml + /api/billing/subscription: + $ref: paths/api@billing@subscription.yaml + /api/billing/paymentmethod/attach: + $ref: paths/api@billing@paymentmethod@attach.yaml + /api/billing/paymentmethod/detach: + $ref: paths/api@billing@paymentmethod@detach.yaml + /api/billing/paymentmethod/default: + $ref: paths/api@billing@paymentmethod@default.yaml + 
/api/billing/device-choice: + $ref: paths/api@billing@device-choice.yaml + /api/billing/devices-most-used: + $ref: paths/api@billing@devices-most-used.yaml + /api/billing/report: + $ref: paths/api@billing@report.yaml + /api/billing/evaluate: + $ref: paths/api@billing@evaluate.yaml + /api/user/saml/auth: + $ref: paths/api@user@saml@auth.yaml + /api/user/mfa/auth: + $ref: paths/api@user@mfa@auth.yaml + /api/user/mfa/recover: + $ref: paths/api@user@mfa@recovery.yaml + /api/user/mfa/reset: + $ref: paths/api@user@mfa@reset.yaml + /api/user/mfa/generate: + $ref: paths/api@user@mfa@generate.yaml + /api/user/mfa/enable: + $ref: paths/api@user@mfa@enable.yaml + /api/user/mfa/disable: + $ref: paths/api@user@mfa@disable.yaml + /api/user/mfa/reset/{user-id}: + $ref: paths/api@user@mfa@reset@{user-id}.yaml + /api/user: + $ref: paths/api@user.yaml + # Lookup user status + # TODO: rename this endpoint + /api/namespaces/{tenant}/members/{id}/accept-invite: + $ref: paths/api@namespaces@{tenant}@members@{id}@accept-invite.yaml + /api/namespaces/{tenant}/invitations/links: + $ref: paths/api@namespaces@{tenant}@invitations@links.yaml + /api/namespaces/{tenant}/invitations/accept: + $ref: paths/api@namespaces@{tenant}@invitations@accept.yaml + /api/namespaces/{tenant}/invitations/decline: + $ref: paths/api@namespaces@{tenant}@invitations@decline.yaml + /api/namespaces/{tenant}/invitations/{user-id}: + $ref: paths/api@namespaces@{tenant}@invitations@{user-id}.yaml + /api/namespaces/{tenant}/invitations: + $ref: paths/api@namespaces@{tenant}@invitations.yaml + /api/users/invitations: + $ref: paths/api@users@invitations.yaml + /api/namespaces/{tenant}/support: + $ref: paths/api@namespaces@{tenant}@support.yaml + /api/web-endpoints: + $ref: paths/api@web-endpoints.yaml + /api/web-endpoints/{address}: + $ref: paths/api@web-endpoints@{address}.yaml + /api/devices/{uid}/tunnels: + $ref: paths/api@devices@{uid}@tunnels.yaml + /api/devices/{uid}/tunnels/{address}: + $ref: 
paths/api@devices@{uid}@tunnels@{address}.yaml + /admin/api/authentication: + $ref: paths/admin@api@authentication.yaml + /admin/api/authentication/local: + $ref: paths/admin@api@authentication@local.yaml + /admin/api/authentication/saml: + $ref: paths/admin@api@authentication@saml.yaml + /admin/api/devices: + $ref: paths/admin@api@devices.yaml + /admin/api/devices/{uid}: + $ref: paths/admin@api@devices@{uid}.yaml + /admin/api/devices/{uid}/{status}: + $ref: paths/admin@api@devices@{uid}@{status}.yaml + /admin/api/firewall/rules: + $ref: paths/admin@api@firewall@rules.yaml + /admin/api/firewall/rules/{id}: + $ref: paths/admin@api@firewall@rules@{id}.yaml + /admin/api/sessions: + $ref: paths/admin@api@sessions.yaml + /admin/api/sessions/{uid}: + $ref: paths/admin@api@sessions@{uid}.yaml + /admin/api/sshkeys/public-keys: + $ref: paths/admin@api@sshkeys@public-keys.yaml + /admin/api/namespaces: + $ref: paths/admin@api@namespaces.yaml + /admin/api/namespaces/{tenant}: + $ref: paths/admin@api@namespaces@{tenant}.yaml + /admin/api/namespaces-update/{tenantID}: + $ref: paths/admin@api@namespaces-update@{tenantID}.yaml + /admin/api/login: + $ref: paths/admin@api@login.yaml + /admin/api/users: + $ref: paths/admin@api@users.yaml + /admin/api/users/{id}: + $ref: paths/admin@api@users@{id}.yaml + /admin/api/users/{id}/password/reset: + $ref: paths/admin@api@users@{id}@password@reset.yaml + /admin/api/stats: + $ref: paths/admin@api@stats.yaml + /admin/api/license: + $ref: paths/admin@api@license.yaml + /admin/api/auth/token/{id}: + $ref: paths/admin@api@auth@token@{id}.yaml + /admin/api/export/namespaces: + $ref: paths/admin@api@export@namespaces.yaml + /admin/api/export/users: + $ref: paths/admin@api@export@users.yaml + /admin/api/announcements: + $ref: paths/admin@api@announcements.yaml + /admin/api/announcements/{uuid}: + $ref: paths/admin@api@announcements@{uuid}.yaml diff --git a/openapi/spec/paths/admin@api@announcements.yaml 
b/openapi/spec/paths/admin@api@announcements.yaml new file mode 100644 index 00000000000..49079eba56e --- /dev/null +++ b/openapi/spec/paths/admin@api@announcements.yaml @@ -0,0 +1,77 @@ +get: + operationId: listAnnouncementsAdmin + summary: List announcements + description: List the announcements posted by ShellHub Cloud. + tags: + - admin + - cloud + - enterprise + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/query/pageQuery.yaml + - $ref: ../components/parameters/query/perPageQuery.yaml + - name: order_by + schema: + description: Announcements' list order. + type: string + enum: + - asc + - desc + example: asc + default: desc + required: false + in: query + responses: + '200': + description: Success to get the announcements. + headers: + X-Total-Count: + description: Announcements' total number. + schema: + type: string + minimum: 0 + readOnly: true + content: + application/json: + schema: + type: array + items: + $ref: ../components/schemas/announcementShort.yaml + '400': + $ref: ../components/responses/400.yaml + '500': + $ref: ../components/responses/500.yaml +post: + operationId: createAnnouncement + summary: Create an announcement + description: Create an announcement. + tags: + - admin + - cloud + - enterprise + security: + - jwt: [] + - api-key: [] + requestBody: + content: + application/json: + schema: + type: object + properties: + title: + $ref: ../components/schemas/announcementTitle.yaml + content: + $ref: ../components/schemas/announcementContent.yaml + responses: + '200': + description: Success to create an announcement. 
+ content: + application/json: + schema: + $ref: ../components/schemas/announcement.yaml + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/admin@api@announcements@{uuid}.yaml b/openapi/spec/paths/admin@api@announcements@{uuid}.yaml new file mode 100644 index 00000000000..18147332d55 --- /dev/null +++ b/openapi/spec/paths/admin@api@announcements@{uuid}.yaml @@ -0,0 +1,82 @@ +get: + operationId: getAnnouncementAdmin + summary: Get an announcement + description: Get an announcement. + tags: + - admin + - cloud + - enterprise + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/path/announcementUUID.yaml + responses: + '200': + description: Success to get an announcement. + content: + application/json: + schema: + $ref: ../components/schemas/announcement.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml +put: + operationId: updateAnnouncement + summary: Update an announcement + description: Update an announcement. + tags: + - admin + - cloud + - enterprise + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/path/announcementUUID.yaml + requestBody: + content: + application/json: + schema: + type: object + properties: + title: + $ref: ../components/schemas/announcementTitle.yaml + content: + $ref: ../components/schemas/announcementContent.yaml + responses: + '200': + description: Success to update an announcement. + content: + application/json: + schema: + $ref: ../components/schemas/announcement.yaml + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml +delete: + operationId: deleteAnnouncement + summary: Delete an announcement + description: Delete an announcement. 
+ tags: + - admin + - cloud + - enterprise + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/path/announcementUUID.yaml + responses: + '200': + description: Success to delete an announcement. + content: + application/json: + schema: + $ref: ../components/schemas/announcement.yaml + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/admin@api@auth@token@{id}.yaml b/openapi/spec/paths/admin@api@auth@token@{id}.yaml new file mode 100644 index 00000000000..4049a728a04 --- /dev/null +++ b/openapi/spec/paths/admin@api@auth@token@{id}.yaml @@ -0,0 +1,42 @@ +get: + operationId: getUserTokenAdmin + summary: Get user token + description: Get user JWT token to login. + tags: + - admin + - enterprise + - users + security: + - jwt: [] + - api-key: [] + parameters: + - name: id + description: User's ID + schema: + $ref: ../components/schemas/userID.yaml + required: true + in: path + responses: + '200': + description: Success to get the JWT token to login. + content: + application/json: + schema: + type: object + properties: + token: + $ref: ../components/schemas/jwt.yaml + '401': + $ref: ../components/responses/401.yaml + '404': + description: Not Found + content: + application/json: + schema: + type: object + properties: + message: + description: Error message + type: string + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/admin@api@authentication.yaml b/openapi/spec/paths/admin@api@authentication.yaml new file mode 100644 index 00000000000..08e05fca55f --- /dev/null +++ b/openapi/spec/paths/admin@api@authentication.yaml @@ -0,0 +1,101 @@ +get: + operationId: getAuthenticationSettings + summary: Get Authentication Settings + description: Retrieves the current authentication settings. 
+ tags: + - admin + - authentication-settings + - enterprise + security: + - jwt: [] + responses: + '200': + description: Successfully retrieved the authentication settings. + content: + application/json: + schema: + type: object + properties: + local: + type: object + properties: + enabled: + type: boolean + description: Indicates whether local authentication is active. + saml: + type: object + properties: + enabled: + type: boolean + description: Indicates whether SAML authentication is active. + auth_url: + type: string + description: The URL used to authenticate the SAML user. + assertion_url: + type: string + description: The URL where the IdP must permit redirects. + idp: + type: object + properties: + entity_id: + type: string + description: The Entity ID of the IdP. + certificates: + type: array + description: The list of public X509 certificates of the IdP. + items: + type: string + description: A single public X509 certificate of the IdP. + binding: + type: object + description: | + Configuration for SAML binding methods that define how authentication requests + and responses are transmitted between ShellHub and the IdP. + properties: + post: + type: string + format: url + description: | + The Single Sign-On URL for HTTP-POST binding. This URL is where ShellHub + will redirect users for authentication using the HTTP-POST method. + redirect: + type: string + format: url + description: | + The Single Sign-On URL for HTTP-Redirect binding. This URL is where ShellHub + will redirect users for authentication using the HTTP-Redirect method. + preferred: + type: string + enum: [post, redirect] + description: | + Specifies which binding method ShellHub prefers when both POST and + Redirect bindings are available. If left blank or empty, POST binding is preferred. 
+ mappings: + type: object + properties: + email: + type: string + default: emailAddress + name: + type: string + default: displayName + sp: + type: object + properties: + sign_requests: + type: boolean + description: Indicates whether ShellHub signs authentication requests. + '401': + $ref: ../components/responses/401.yaml + '404': + description: Not Found + content: + application/json: + schema: + type: object + properties: + message: + description: Error message. + type: string + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/admin@api@authentication@local.yaml b/openapi/spec/paths/admin@api@authentication@local.yaml new file mode 100644 index 00000000000..8368d341e02 --- /dev/null +++ b/openapi/spec/paths/admin@api@authentication@local.yaml @@ -0,0 +1,31 @@ +put: + operationId: configureLocalAuthentication + summary: Configure Local Authentication + description: Configure local authentication settings for the ShellHub instance. + tags: + - admin + - authentication-settings + - enterprise + security: + - jwt: [] + requestBody: + required: true + content: + application/json: + schema: + type: object + required: [enable] + properties: + enable: + type: boolean + description: | + Specifies whether to activate (`true`) or deactivate (`false`) local authentication. + responses: + '200': + $ref: '../components/responses/200.yaml' + '400': + $ref: '../components/responses/400.yaml' + '401': + $ref: '../components/responses/401.yaml' + '500': + $ref: '../components/responses/500.yaml' diff --git a/openapi/spec/paths/admin@api@authentication@saml.yaml b/openapi/spec/paths/admin@api@authentication@saml.yaml new file mode 100644 index 00000000000..ef38badcb34 --- /dev/null +++ b/openapi/spec/paths/admin@api@authentication@saml.yaml @@ -0,0 +1,122 @@ +put: + operationId: configureSAMLAuthentication + summary: Configure SAML Authentication + description: | + Configure SAML authentication settings for the ShellHub instance. 
+ + The NameID in the SAML assertion from the IdP must be configured to use + a format that provides a unique and persistent identifier for each user. This could be + a persistent ID, email address, or any other attribute that uniquely identifies the user + within your IdP. + tags: + - admin + - authentication-settings + - enterprise + security: + - jwt: [] + requestBody: + required: true + content: + application/json: + schema: + type: object + required: [enable, idp, sp] + properties: + enable: + type: boolean + description: | + Specifies whether to activate (`true`) or deactivate (`false`) SAML authentication. + If set to `false`, all other attributes will be ignored. + idp: + type: object + description: | + Configuration for the Identity Provider (IdP) that ShellHub will use for authentication. + You can provide either a metadata URL to fetch all necessary information automatically + or specify the `SignOn URL`, `Entity ID`, and `X509 certificate` explicitly. + properties: + metadata_url: + type: string + format: url + description: | + The URL to fetch IdP metadata. If provided, this will automatically retrieve + all necessary configuration details and take precedence over manually specified values. + entity_id: + type: string + description: The Entity ID of the IdP. + certificate: + type: string + description: | + The public X509 certificate of the IdP. It can be provided with or without + the PEM delimiters (`-----BEGIN CERTIFICATE-----` and `-----END CERTIFICATE-----`). + binding: + type: object + description: | + Configuration for SAML binding methods that define how authentication requests + and responses are transmitted between ShellHub and the IdP. SAML supports + different binding protocols for flexibility in deployment scenarios. + properties: + post: + type: string + format: url + description: | + The Single Sign-On URL for HTTP-POST binding. 
This URL is where ShellHub + will redirect users for authentication using the HTTP-POST method, which + sends SAML data in the body of an HTTP POST request. This binding is more + secure as it doesn't expose SAML data in URL parameters. + redirect: + type: string + format: url + description: | + The Single Sign-On URL for HTTP-Redirect binding. This URL is where ShellHub + will redirect users for authentication using the HTTP-Redirect method, which + sends SAML data as URL parameters. This binding is simpler but has URL length + limitations and exposes SAML data in browser history and server logs. + preferred: + type: string + enum: [post, redirect] + description: | + Specifies which binding method ShellHub should prefer when both POST and + Redirect bindings are available. If only one binding URL is provided, + that binding will be used regardless of this preference setting. If left + blank or empty, POST binding is preferred by default. + mappings: + type: object + description: | + Defines how SAML attributes from the IdP should be mapped + to ShellHub user attributes. + properties: + email: + type: string + description: | + The name of the SAML attribute that contains the user's email address. + This attribute will be used to identify and authenticate users in ShellHub. + For example, if your IdP sends the email in a SAML attribute named "mail", + set this value to "mail". + default: emailAddress + name: + type: string + description: | + The name of the SAML attribute that contains the user's display name. + This attribute will be used as the user's name in ShellHub. + For example, if your IdP sends the user's name in a SAML attribute + named "displayName", set this value to "displayName". + default: displayName + sp: + type: object + description: Configuration settings for how ShellHub will function as a Service Provider (SP). + properties: + sign_requests: + type: boolean + description: | + Indicates whether ShellHub should sign authentication requests. 
+ If `true`, ShellHub will generate a public X509 certificate that must be deployed on the IdP for validation. + The IdP-initiated workflow is not supported when this option is enabled. + responses: + '200': + $ref: '../components/responses/200.yaml' + '400': + $ref: '../components/responses/400.yaml' + '401': + $ref: '../components/responses/401.yaml' + '500': + $ref: '../components/responses/500.yaml' diff --git a/openapi/spec/paths/admin@api@devices.yaml b/openapi/spec/paths/admin@api@devices.yaml new file mode 100644 index 00000000000..5f35753fc53 --- /dev/null +++ b/openapi/spec/paths/admin@api@devices.yaml @@ -0,0 +1,90 @@ +get: + operationId: getDevicesAdmin + summary: Get devices admin + description: Get a list of devices. + tags: + - admin + - devices + - enterprise + security: + - jwt: [] + - api-key: [] + parameters: + - name: filter + description: | + Device's filter + + + Filter field receives a base64 encoded JSON object to limit a search. + The JSON object should have a property called `type`; it will filter by a property called `name` where the value `contains` `linux`.
+ + If you want to get only devices named `linux`, the JSON object will look + like this + + ```json + [ + { + "type":"property", + "params": + { + "name":"name", + "operator":"contains", + "value":"linux" + } + } + ] + ``` + + So, the output encoded string will result in: + `W3sidHlwZSI6InByb3BlcnR5IiwicGFyYW1zIjp7Im5hbWUiOiJuYW1lIiwib3BlcmF0b3IiOiJjb250YWlucyIsInZhbHVlIjoibGludXgifX1d` + schema: + type: string + pattern: ^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$ + example: W3sidHlwZSI6InByb3BlcnR5IiwicGFyYW1zIjp7Im5hbWUiOiJuYW1lIiwib3BlcmF0b3IiOiJjb250YWlucyIsInZhbHVlIjoibGludXgifX1d + required: false + in: query + - $ref: ../components/parameters/query/pageQuery.yaml + - $ref: ../components/parameters/query/perPageQuery.yaml + - name: status + description: Device's status + schema: + $ref: ../components/schemas/deviceStatus.yaml + required: false + in: query + - name: sort_by + description: Device's property to sort by + schema: + type: string + example: name + required: false + in: query + - name: order_by + schema: + description: Device's list order + type: string + enum: + - asc + - desc + example: asc + required: false + in: query + responses: + '200': + description: Success to get a list of devices. + headers: + X-Total-Count: + description: Devices' total number. + schema: + type: string + minimum: 0 + readOnly: true + content: + application/json: + schema: + type: array + items: + $ref: ../components/schemas/device.yaml + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/admin@api@devices@{uid}.yaml b/openapi/spec/paths/admin@api@devices@{uid}.yaml new file mode 100644 index 00000000000..9654da2e40e --- /dev/null +++ b/openapi/spec/paths/admin@api@devices@{uid}.yaml @@ -0,0 +1,84 @@ +parameters: + - $ref: ../components/parameters/path/deviceUIDPath.yaml +get: + operationId: getDeviceAdmin + summary: Get device admin + description: Get a device.
+ tags: + - admin + - devices + - enterprise + security: + - jwt: [] + - api-key: [] + responses: + '200': + description: Success to get a device. + content: + application/json: + schema: + $ref: ../components/schemas/device.yaml + '401': + $ref: ../components/responses/401.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml +delete: + operationId: deleteDeviceAdmin + summary: Delete device admin + description: Delete a device. + tags: + - admin + - devices + - enterprise + security: + - jwt: [] + - api-key: [] + responses: + '200': + description: Success to delete a device. + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '500': + $ref: ../components/responses/500.yaml +patch: + operationId: updateDeviceNameAdmin + summary: Update device name Admin + description: Update device's name. + tags: + - admin + - devices + - enterprise + security: + - jwt: [] + - api-key: [] + requestBody: + content: + application/json: + schema: + type: object + properties: + name: + description: Device's new name. + type: string + example: example + required: + - name + responses: + '200': + description: Success to update device's name. + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '409': + $ref: ../components/responses/409.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/admin@api@devices@{uid}@{status}.yaml b/openapi/spec/paths/admin@api@devices@{uid}@{status}.yaml new file mode 100644 index 00000000000..08dea801f3d --- /dev/null +++ b/openapi/spec/paths/admin@api@devices@{uid}@{status}.yaml @@ -0,0 +1,27 @@ +patch: + operationId: updateDeviceStatusAdmin + summary: Update status Admin + description: Update device's status. 
+ tags: + - admin + - devices + - enterprise + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/path/deviceUIDPath.yaml + - $ref: ../components/parameters/path/deviceStatusPath.yaml + responses: + '200': + description: Success to update device status. + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '402': + description: Payment required. + '403': + $ref: ../components/responses/403.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/admin@api@export@namespaces.yaml b/openapi/spec/paths/admin@api@export@namespaces.yaml new file mode 100644 index 00000000000..21003af6ab6 --- /dev/null +++ b/openapi/spec/paths/admin@api@export@namespaces.yaml @@ -0,0 +1,61 @@ +get: + operationId: exportNamespaces + deprecated: true + summary: export namespace + description: | + Export namespaces to a CSV file. This endpoint has been deprecated and will be removed in v1.0.0. + tags: + - admin + - enterprise + - namespaces + security: + - jwt: [] + - api-key: [] + parameters: + - name: filter + description: | + Namespace's filter + + + Filter field receives a base64 encoded JSON object to limit a search. + The JSON object should have a property called `type`; it will filter by a property called `devices` where the value should be greater than (`gt`) `0`.
+ + An example of a JSON object looks like this: + + ```json + [ + { + "type":"property", + "params": + { + "name":"devices", + "operator":"gt", + "value":"0" + } + } + ] + ``` + + So, the output encoded string will result in: + `W3sidHlwZSI6InByb3BlcnR5IiwicGFyYW1zIjp7Im5hbWUiOiJkZXZpY2VzIiwib3BlcmF0b3IiOiJndCIsInZhbHVlIjoiMCJ9fV0=` + schema: + type: string + pattern: ^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$ + example: W3sidHlwZSI6InByb3BlcnR5IiwicGFyYW1zIjp7Im5hbWUiOiJkZXZpY2VzIiwib3BlcmF0b3IiOiJndCIsInZhbHVlIjoiMCJ9fV0= + in: query + - $ref: ../components/parameters/query/pageQuery.yaml + - $ref: ../components/parameters/query/perPageQuery.yaml + responses: + '200': + description: Success to export namespaces. + content: + application/octet-stream: + schema: + type: string + format: 'binary' + '204': + description: No content. + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/admin@api@export@users.yaml b/openapi/spec/paths/admin@api@export@users.yaml new file mode 100644 index 00000000000..c65043033fa --- /dev/null +++ b/openapi/spec/paths/admin@api@export@users.yaml @@ -0,0 +1,58 @@ +get: + operationId: exportUsers + summary: export users + description: Export users to a CSV file. + tags: + - admin + - enterprise + - users + security: + - jwt: [] + parameters: + - name: filter + description: | + User's filter + + + Filter field receives a base64 encoded JSON object to limit a search. + The JSON object should have a property called `type`; it will filter by a property called `namespaces` where the value should be `eq` to `0`.
+ + An example of a JSON object looks like this: + + ```json + [ + { + "type":"property", + "params": + { + "name":"namespaces", + "operator":"eq", + "value":"0" + } + } + ] + ``` + + So, the output encoded string will result in: + `W3sidHlwZSI6InByb3BlcnR5IiwicGFyYW1zIjp7Im5hbWUiOiJuYW1lc3BhY2VzIiwib3BlcmF0b3IiOiJlcSIsInZhbHVlIjoiMCJ9fV0=` + schema: + type: string + pattern: ^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$ + example: W3sidHlwZSI6InByb3BlcnR5IiwicGFyYW1zIjp7Im5hbWUiOiJuYW1lc3BhY2VzIiwib3BlcmF0b3IiOiJlcSIsInZhbHVlIjoiMCJ9fV0= + in: query + - $ref: ../components/parameters/query/pageQuery.yaml + - $ref: ../components/parameters/query/perPageQuery.yaml + responses: + '200': + description: Success to export users. + content: + application/octet-stream: + schema: + type: string + format: 'binary' + '204': + description: No content. + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/admin@api@firewall@rules.yaml b/openapi/spec/paths/admin@api@firewall@rules.yaml new file mode 100644 index 00000000000..48266cb285f --- /dev/null +++ b/openapi/spec/paths/admin@api@firewall@rules.yaml @@ -0,0 +1,69 @@ +post: + operationId: createFirewallRuleAdmin + summary: Create firewall rule Admin + description: Create a firewall rule. + tags: + - admin + - enterprise + - rules + security: + - jwt: [] + - api-key: [] + requestBody: + content: + application/json: + schema: + $ref: ../components/schemas/firewallRulesRequest.yaml + responses: + '200': + description: Success to create firewall rule.
+ content: + application/json: + schema: + $ref: ../components/schemas/firewallRulesResponse.yaml + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml +get: + operationId: getFirewallRulesAdmin + summary: Get firewall rules Admin + description: Get a list of firewall rules. + tags: + - admin + - enterprise + - rules + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/query/pageQuery.yaml + - $ref: ../components/parameters/query/perPageQuery.yaml + responses: + '200': + description: Success to get firewall rules. + headers: + X-Total-Count: + description: Firewall rules' total number. + schema: + type: string + minimum: 0 + readOnly: true + content: + application/json: + schema: + type: array + items: + $ref: ../components/schemas/firewallRulesResponse.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/admin@api@firewall@rules@{id}.yaml b/openapi/spec/paths/admin@api@firewall@rules@{id}.yaml new file mode 100644 index 00000000000..1048a37a5b7 --- /dev/null +++ b/openapi/spec/paths/admin@api@firewall@rules@{id}.yaml @@ -0,0 +1,82 @@ +parameters: + - name: id + schema: + description: Firewall rule's ID + type: string + in: path + required: true +get: + operationId: getFirewallRuleAdmin + summary: Get firewall rule admin + description: Get a firewall rule. + tags: + - admin + - enterprise + - rules + security: + - jwt: [] + - api-key: [] + responses: + '200': + description: Success to get firewall rule. 
+ content: + application/json: + schema: + $ref: ../components/schemas/firewallRulesResponse.yaml + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml +put: + operationId: updateFirewallRuleAdmin + summary: Update firewall rule admin + description: Update a firewall rule. + tags: + - admin + - enterprise + - rules + security: + - jwt: [] + - api-key: [] + requestBody: + content: + application/json: + schema: + $ref: ../components/schemas/firewallRulesRequest.yaml + responses: + '200': + description: Success to update firewall rule. + content: + application/json: + schema: + $ref: ../components/schemas/firewallRulesResponse.yaml + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml +delete: + operationId: deleteFirewallRuleAdmin + summary: Delete firewall rule admin + description: Delete a firewall rule. + tags: + - admin + - enterprise + - rules + security: + - jwt: [] + - api-key: [] + responses: + '200': + description: Success to delete a firewall rule. + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/admin@api@license.yaml b/openapi/spec/paths/admin@api@license.yaml new file mode 100644 index 00000000000..0ac2e8cdd85 --- /dev/null +++ b/openapi/spec/paths/admin@api@license.yaml @@ -0,0 +1,426 @@ +get: + operationId: getLicense + summary: Get license data + description: Get the license data. + tags: + - admin + - enterprise + - license + security: + - jwt: [] + - api-key: [] + responses: + '200': + description: Success to get data. 
+ content: + application/json: + schema: + type: object + properties: + id: + description: License's ID + type: string + example: x3dd0d1f8-8246-4519-b11a-a3dd33717f65 + expired: + description: License's expired status + type: boolean + about_to_expire: + description: License's about to expire status + type: boolean + grace_period: + description: License's grace period status + type: boolean + issued_at: + description: |- + License's issued at + + It is the number of seconds elapsed since January 1, 1970 UTC. + type: integer + minimum: -1 + example: -1 + starts_at: + description: |- + License's started at + + It is the number of seconds elapsed since January 1, 1970 UTC. + type: integer + minimum: -1 + example: -1 + expires_at: + description: |- + License's expired at + + It is the number of seconds elapsed since January 1, 1970 UTC. + + `-1` means license does not expire + type: integer + minimum: -1 + example: -1 + allowed_regions: + description: |- + License's allowed regions + + It is a list of regions in `ISO 3166-1 alpha-2` format. 
+ type: array + items: + description: License's allowed regions + type: string + enum: + - AD + - AE + - AF + - AG + - AI + - AL + - AM + - AO + - AQ + - AR + - AS + - AT + - AU + - AW + - AX + - AZ + - BA + - BB + - BD + - BE + - BF + - BG + - BH + - BI + - BJ + - BL + - BM + - BN + - BO + - BQ + - BR + - BS + - BT + - BV + - BW + - BY + - BZ + - CA + - CC + - CD + - CF + - CG + - CH + - CI + - CK + - CL + - CM + - CN + - CO + - CR + - CU + - CV + - CW + - CX + - CY + - CZ + - DE + - DJ + - DK + - DM + - DO + - DZ + - EC + - EE + - EG + - EH + - ER + - ES + - ET + - FI + - FJ + - FK + - FM + - FO + - FR + - GA + - GB + - GD + - GE + - GF + - GG + - GH + - GI + - GL + - GM + - GN + - GP + - GQ + - GR + - GS + - GT + - GU + - GW + - GY + - HK + - HM + - HN + - HR + - HT + - HU + - ID + - IE + - IL + - IM + - IN + - IO + - IQ + - IR + - IS + - IT + - JE + - JM + - JO + - JP + - KE + - KG + - KH + - KI + - KM + - KN + - KP + - KR + - KW + - KY + - KZ + - LA + - LB + - LC + - LI + - LK + - LR + - LS + - LT + - LU + - LV + - LY + - MA + - MC + - MD + - ME + - MF + - MG + - MH + - MK + - ML + - MM + - MN + - MO + - MP + - MQ + - MR + - MS + - MT + - MU + - MV + - MW + - MX + - MY + - MZ + - NA + - NC + - NE + - NF + - NG + - NI + - NL + - NO + - NP + - NR + - NU + - NZ + - OM + - PA + - PE + - PF + - PG + - PH + - PK + - PL + - PM + - PN + - PR + - PS + - PT + - PW + - PY + - QA + - RE + - RO + - RS + - RU + - RW + - SA + - SB + - SC + - SD + - SE + - SG + - SH + - SI + - SJ + - SK + - SL + - SM + - SN + - SO + - SR + - SS + - ST + - SV + - SX + - SY + - SZ + - TC + - TD + - TF + - TG + - TH + - TJ + - TK + - TM + - TN + - TO + - TR + - TT + - TV + - TW + - TZ + - UA + - UG + - UM + - US + - UY + - UZ + - VA + - VC + - VE + - VG + - VI + - VN + - VU + - WF + - WS + - YE + - YT + - ZA + - ZM + - ZW + example: [] + customer: + description: License's customer + type: object + properties: + id: + description: Customer's ID + type: string + example: 
x3dd0d1f8-8246-4519-b11a-a3dd33717f65 + name: + description: Customer's name + type: string + example: ShellHub Full Perpetual License + email: + description: Customer's email + type: string + format: email + example: contato@ossystems.com.br + company: + description: Customer's company + type: string + example: O.S. Systems + features: + description: License's features + type: object + properties: + devices: + description: | + Number of devices allowed + + + `-1` means unlimited number of devices and any other + number means the number of devices allowed + type: integer + minimum: -1 + example: -1 + session_recording: + description: Session recording status + type: boolean + example: true + firewall_rules: + description: Firewall rules status + type: boolean + example: true + reports: + description: Reports status + type: boolean + example: true + login_link: + description: Login link status + type: boolean + example: true + billing: + description: Billing status + type: boolean + example: true + required: + - devices + - session_recording + - firewall_rules + - reports + - login_link + - billing + required: + - id + - expired + - about_to_expire + - grace_period + - issued_at + - starts_at + - expires_at + - allowed_regions + - customer + - features + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml +post: + operationId: sendLicense + summary: Send license data + description: Send license data + tags: + - admin + - enterprise + - license + security: + - jwt: [] + - api-key: [] + requestBody: + content: + multipart/form-data: + schema: + type: object + properties: + file: + description: License's file + type: string + format: binary + example: | + 
ewogICJpZCI6ICJ4eHh4eHh4eC14eHh4LXh4eHgteHh4eC14eHh4eHh4eHh4eHgiLAogICJpc3N1ZWRfYXQiOiAtMSwKICAic3RhcnRzX2F0IjogLTEsCiAgImV4cGlyZXNfYXQiOiAtMSwKICAiY3VzdG9tZXIiOiB7CiAgICAiaWQiOiAieHh4eHh4eHgteHh4eC14eHh4LXh4eHgteHh4eHh4eHh4eHh4IiwKICAgICJuYW1lIjogIlNoZWxsSHViIEZ1bGwgUGVycGV0dWFsIExpY2Vuc2UiLAogICAgImVtYWlsIjogImNvbnRhdG9Ab3NzeXN0ZW1zLmNvbS5iciIsCiAgICAiY29tcGFueSI6ICJPLlMuIFN5c3RlbXMiCiAgfSwKICAiZmVhdHVyZXMiOiB7CiAgICAiZGV2aWNlcyI6IC0xLAogICAgInNlc3Npb25fcmVjb3JkaW5nIjogdHJ1ZSwKICAgICJmaXJld2FsbF9ydWxlcyI6IHRydWUsCiAgICAicmVwb3J0cyI6IHRydWUsCiAgICAibG9naW5fbGluayI6IHRydWUsCiAgICAiYmlsbGluZyI6IHRydWUKICB9Cn0KHtQQZHb8b5kZ + responses: + '200': + description: Success to send license data + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/admin@api@login.yaml b/openapi/spec/paths/admin@api@login.yaml new file mode 100644 index 00000000000..f48c291d1b7 --- /dev/null +++ b/openapi/spec/paths/admin@api@login.yaml @@ -0,0 +1,50 @@ +post: + operationId: loginAdmin + summary: Login on Admin + description: Login on Admin + tags: + - admin + - enterprise + security: [] + requestBody: + content: + application/json: + schema: + type: object + properties: + username: + description: Admin's username + type: string + minLength: 3 + maxLength: 20 + password: + description: Admin's password + type: string + minLength: 3 + maxLength: 30 + required: + - username + - password + responses: + '200': + description: Success to Login on Admin + content: + application/json: + schema: + type: object + properties: + token: + description: JWT token + type: string + example: | + eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJleGFtcGxlIiwibmFtZSI6ImV4YW1wbGUiLCJpYXQiOjE1MTYyMzkwMjJ9.zqCt70KspnNnitZlv89hDbFZ5iGMMRUn0wFEmmlY-to + user: + description: username + type: string + minLength: 3 + maxLength: 20 + example: admin + '401': + $ref: 
../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/admin@api@namespaces-update@{tenantID}.yaml b/openapi/spec/paths/admin@api@namespaces-update@{tenantID}.yaml new file mode 100644 index 00000000000..1fc437f3288 --- /dev/null +++ b/openapi/spec/paths/admin@api@namespaces-update@{tenantID}.yaml @@ -0,0 +1,30 @@ +parameters: + - name: tenantID + description: Namespace's tenant ID + schema: + $ref: ../components/schemas/namespaceTenantID.yaml + required: true + in: path +put: + operationId: editNamespaceAdmin + summary: Edit namespace admin + description: Edit a namespace. + tags: + - admin + - enterprise + - namespaces + security: + - jwt: [] + - api-key: [] + requestBody: + content: + application/json: + schema: + $ref: ../components/schemas/namespace.yaml + responses: + '200': + description: Success to edit a namespace. + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/admin@api@namespaces.yaml b/openapi/spec/paths/admin@api@namespaces.yaml new file mode 100644 index 00000000000..4c978c3ac1b --- /dev/null +++ b/openapi/spec/paths/admin@api@namespaces.yaml @@ -0,0 +1,65 @@ +get: + operationId: getNamespacesAdmin + summary: Get namespaces admin + description: Returns a list of namespaces. + tags: + - admin + - enterprise + - namespaces + security: + - jwt: [] + - api-key: [] + parameters: + - name: filter + description: | + Namespaces' filter. + + + Filter field receives a base64 encoded JSON object to limit a search. + The JSON object should have a property called `type`; it will filter by a property called `name` where the value `contains` `examplespace`.
+ + If you want to get only namespaces named `examplespace`, the JSON object will look + like this + + ```json + [ + { + "type":"property", + "params":{ + "name":"name", + "operator":"contains", + "value":"examplespace" + } + } + ] + ``` + + So, the output encoded string will result in: + `W3sidHlwZSI6InByb3BlcnR5IiwicGFyYW1zIjp7Im5hbWUiOiJuYW1lIiwib3BlcmF0b3IiOiJjb250YWlucyIsInZhbHVlIjoiZXhhbXBsZXNwYWNlIn19XQ==` + schema: + type: string + pattern: ^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$ + example: W3sidHlwZSI6InByb3BlcnR5IiwicGFyYW1zIjp7Im5hbWUiOiJuYW1lIiwib3BlcmF0b3IiOiJjb250YWlucyIsInZhbHVlIjoiZXhhbXBsZXNwYWNlIn19XQ== + required: false + in: query + - $ref: ../components/parameters/query/pageQuery.yaml + - $ref: ../components/parameters/query/perPageQuery.yaml + responses: + '200': + description: Success to get a namespace list. + headers: + X-Total-Count: + description: Namespaces' total number. + schema: + type: string + minimum: 0 + content: + application/json: + schema: + type: array + items: + $ref: ../components/schemas/namespace.yaml + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/admin@api@namespaces@{tenant}.yaml b/openapi/spec/paths/admin@api@namespaces@{tenant}.yaml new file mode 100644 index 00000000000..a190ed94451 --- /dev/null +++ b/openapi/spec/paths/admin@api@namespaces@{tenant}.yaml @@ -0,0 +1,88 @@ +parameters: + - $ref: ../components/parameters/path/namespaceTenantIDPath.yaml +get: + operationId: getNamespaceAdmin + summary: Get namespace admin + description: Get a namespace. + tags: + - admin + - enterprise + - namespaces + security: + - jwt: [] + - api-key: [] + responses: + '200': + description: Success to get a namespace.
+ content: + application/json: + schema: + $ref: ../components/schemas/namespace.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml +post: + operationId: createNamespaceAdmin + summary: Create namespace admin + description: Create a namespace. + tags: + - admin + - enterprise + - namespaces + security: + - jwt: [] + requestBody: + content: + application/json: + schema: + type: object + properties: + name: + $ref: ../components/schemas/namespaceName.yaml + required: + - name + responses: + '200': + description: Success to create a namespace. + content: + application/json: + schema: + $ref: ../components/schemas/namespace.yaml + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + # '403': + # $ref: ../components/responses/403.yaml + '409': + $ref: ../components/responses/409.yaml + '500': + $ref: ../components/responses/500.yaml +delete: + operationId: deleteNamespaceAdmin + summary: Delete namespace admin + description: Delete a namespace. + tags: + - admin + - enterprise + - namespaces + security: + - jwt: [] + responses: + '200': + description: Success to delete a namespace. + '401': + $ref: ../components/responses/401.yaml + '402': + $ref: ../components/responses/402.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/admin@api@sessions.yaml b/openapi/spec/paths/admin@api@sessions.yaml new file mode 100644 index 00000000000..7ed6a2a38db --- /dev/null +++ b/openapi/spec/paths/admin@api@sessions.yaml @@ -0,0 +1,34 @@ +get: + operationId: getSessionsAdmin + summary: Get sessions admin + description: Get a list sessions. 
+ tags: + - admin + - enterprise + - sessions + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/query/pageQuery.yaml + - $ref: ../components/parameters/query/perPageQuery.yaml + responses: + '200': + description: Success to get list of sessions. + headers: + X-Total-Count: + description: Sessions' total number. + schema: + type: string + minimum: 0 + readOnly: true + content: + application/json: + schema: + type: array + items: + $ref: ../components/schemas/session.yaml + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/admin@api@sessions@{uid}.yaml b/openapi/spec/paths/admin@api@sessions@{uid}.yaml new file mode 100644 index 00000000000..36bb04cfe92 --- /dev/null +++ b/openapi/spec/paths/admin@api@sessions@{uid}.yaml @@ -0,0 +1,51 @@ +parameters: + - $ref: ../components/parameters/path/sessionUIDPath.yaml +get: + operationId: getSessionAdmin + summary: Get session admin + description: Get a session. + tags: + - admin + - enterprise + - sessions + security: + - jwt: [] + - api-key: [] + responses: + '200': + description: Success to get a session. + content: + application/json: + schema: + $ref: ../components/schemas/session.yaml + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml +post: + operationId: setSessionAuthenticationStatusAdmin + summary: Set session authentication status admin + description: Set session authentication status. + tags: + - admin + - enterprise + - sessions + security: + - jwt: [] + - api-key: [] + requestBody: + content: + application/json: + schema: + type: object + properties: + authenticated: + description: Session's authentication status. 
+ type: boolean + responses: + '200': + $ref: ../components/responses/200.yaml + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/admin@api@sshkeys@public-keys.yaml b/openapi/spec/paths/admin@api@sshkeys@public-keys.yaml new file mode 100644 index 00000000000..bdeba3fe104 --- /dev/null +++ b/openapi/spec/paths/admin@api@sshkeys@public-keys.yaml @@ -0,0 +1,68 @@ +get: + operationId: getPublicKeysAdmin + summary: Get public keys admin + description: Get a list from all public keys. + tags: + - enterprise + - ssh + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/query/filterQuery.yaml + - $ref: ../components/parameters/query/pageQuery.yaml + - $ref: ../components/parameters/query/perPageQuery.yaml + responses: + '200': + description: Success to get a list of public keys. + headers: + X-Total-Count: + description: Public keys' total number. + schema: + type: string + minimum: 0 + readOnly: true + content: + application/json: + schema: + $ref: ../components/schemas/publicKeyResponse.yaml + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml +post: + operationId: createPublicKeyAdmin + summary: Create public key admin + description: Create a new public key. + tags: + - enterprise + - ssh + security: + - jwt: [] + - api-key: [] + requestBody: + content: + application/json: + schema: + $ref: ../components/schemas/publicKeyRequest.yaml + responses: + '200': + description: Success to create a public key. 
+ content: + application/json: + schema: + $ref: ../components/schemas/publicKeyResponse.yaml + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '409': + $ref: ../components/responses/409.yaml + '422': + description: UnprocessableEntity + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/admin@api@stats.yaml b/openapi/spec/paths/admin@api@stats.yaml new file mode 100644 index 00000000000..338bf978a35 --- /dev/null +++ b/openapi/spec/paths/admin@api@stats.yaml @@ -0,0 +1,47 @@ +get: + operationId: getStats + summary: Get stats + description: Get stats about the ShellHub instance. + tags: + - admin + - enterprise + - stats + security: + - jwt: [] + - api-key: [] + responses: + '200': + description: Success to get stats about the ShellHub instance + content: + application/json: + schema: + type: object + properties: + registered_users: + description: Number of registered users. + type: integer + minimum: 0 + registered_devices: + description: Number of registered devices. + type: integer + minimum: 0 + online_devices: + description: Number of online devices. + type: integer + minimum: 0 + active_sessions: + description: Number of active sessions. + type: integer + minimum: 0 + pending_devices: + description: Number of pending devices. + type: integer + minimum: 0 + rejected_devices: + description: Number of rejected devices. + type: integer + minimum: 0 + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/admin@api@users.yaml b/openapi/spec/paths/admin@api@users.yaml new file mode 100644 index 00000000000..a0991553066 --- /dev/null +++ b/openapi/spec/paths/admin@api@users.yaml @@ -0,0 +1,99 @@ +post: + operationId: createUserAdmin + summary: Create a User admin + description: Create a User. 
+ tags: + - admin + - enterprise + - users + security: + - jwt: [] + - api-key: [] + requestBody: + content: + application/json: + schema: + $ref: ../components/schemas/userAdminRequest.yaml + example: + name: example + email: example@example.com + username: example + password: example + responses: + '200': + $ref: ../components/responses/200.yaml + '400': + $ref: ../components/responses/invalidFields.yaml + '401': + $ref: ../components/responses/401.yaml + '409': + $ref: ../components/responses/conflictFields.yaml + '500': + $ref: ../components/responses/500.yaml +get: + operationId: getUsers + summary: Get users + description: Get a list of users. + tags: + - admin + - enterprise + - users + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/query/filterQuery.yaml + - $ref: ../components/parameters/query/pageQuery.yaml + - $ref: ../components/parameters/query/perPageQuery.yaml + responses: + '200': + description: Success to get a list of users. + headers: + X-Total-Count: + description: users' total number. 
+ schema: + type: string + minimum: 0 + readOnly: true + content: + application/json: + schema: + type: array + items: + $ref: ../components/schemas/userAdminResponse.yaml + examples: + one_user: + value: + - id: 507f1f77bcf86cd799439011 + namespaces: 0 + confirmed: false + created_at: 2020-05-01T00:00:00.000Z + last_login: 2020-05-01T00:00:00.000Z + name: example + email: example@example.com + username: example + password: 50d858e0985ecc7f60418aaf0cc5ab587f42c2570a884095a9e8ccacd0f6545c + two_users: + value: + - id: 507f1f77bcf86cd799439011 + namespaces: 0 + confirmed: true + created_at: 2020-05-01T00:00:00.000Z + last_login: 2020-05-01T00:00:00.000Z + name: example + email: example@example.com + username: example + password: 50d858e0985ecc7f60418aaf0cc5ab587f42c2570a884095a9e8ccacd0f6545c + - id: 507f191e810c19729de860ea + namespaces: 2 + confirmed: false + created_at: 2012-01-02T00:00:00.000Z + last_login: 2012-01-02T00:00:00.000Z + name: example + email: example@example.com + username: example + password: 50d858e0985ecc7f60418aaf0cc5ab587f42c2570a884095a9e8ccacd0f6545c + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/admin@api@users@{id}.yaml b/openapi/spec/paths/admin@api@users@{id}.yaml new file mode 100644 index 00000000000..ffec62a0d81 --- /dev/null +++ b/openapi/spec/paths/admin@api@users@{id}.yaml @@ -0,0 +1,196 @@ +parameters: + - name: id + schema: + description: User's ID + type: string + example: 507f1f77bcf86cd799439011 + required: true + in: path +delete: + operationId: adminDeleteUser + summary: Delete user + description: Delete a user. + tags: + - admin + - enterprise + - users + security: + - jwt: [] + - api-key: [] + responses: + '200': + $ref: ../components/responses/200.yaml + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml +put: + operationId: adminUpdateUser + summary: Update user + description: Update a user. 
+ tags: + - admin + - enterprise + - users + security: + - jwt: [] + - api-key: [] + requestBody: + content: + application/json: + schema: + $ref: ../components/schemas/userAdminRequest.yaml + examples: + update_name: + value: + name: example_new + email: example@example.com + username: example + password: '' + confirmed: true + update_password: + value: + name: example + email: example_new@example.com + username: example + password: example_new + confirmed: true + responses: + '200': + $ref: ../components/responses/200.yaml + '400': + $ref: ../components/responses/invalidFields.yaml + '401': + $ref: ../components/responses/401.yaml + '404': + $ref: ../components/responses/404.yaml + '409': + $ref: ../components/responses/conflictFields.yaml + '500': + $ref: ../components/responses/500.yaml +get: + operationId: getUser + summary: Get user + description: Get a user. + tags: + - admin + - enterprise + - users + security: + - jwt: [] + - api-key: [] + responses: + '200': + description: Success to get a user. 
+ content: + application/json: + schema: + type: object + properties: + id: + $ref: ../components/schemas/userID.yaml + status: + description: User's status + type: string + enum: [confirmed, pending] + max_namespaces: + description: Maximum number of namespaces the user can own + type: integer + minimum: 0 + created_at: + description: User's creating date + type: string + format: date-time + last_login: + description: User's last login date + type: string + format: date-time + name: + $ref: ../components/schemas/userName.yaml + username: + $ref: ../components/schemas/userUsername.yaml + email: + $ref: ../components/schemas/userEmail.yaml + recovery_email: + description: User's recovery email address + type: string + format: email + mfa: + description: Multi-factor authentication settings + type: object + properties: + enabled: + description: Whether MFA is enabled for the user + type: boolean + required: + - enabled + namespacesOwned: + description: Number of namespaces owned by the user + type: integer + minimum: 0 + preferences: + description: User preferences + type: object + properties: + auth_methods: + description: Preferred authentication methods + type: array + items: + type: string + enum: [local, saml] + required: + - auth_methods + required: + - id + - status + - max_namespaces + - created_at + - last_login + - name + - username + - email + - mfa + - namespacesOwned + - preferences + examples: + user_confirmed: + value: + id: '68ada99d2f96ee43fd86306a' + status: 'confirmed' + max_namespaces: 0 + created_at: '2025-08-26T12:33:33.19Z' + last_login: '2025-10-27T13:10:59.458Z' + name: 'dev' + username: 'dev' + email: 'dev@dev.com' + recovery_email: 'banana@banana.com' + mfa: + enabled: false + namespacesOwned: 1 + preferences: + auth_methods: + - 'local' + user_pending: + value: + id: '507f1f77bcf86cd799439012' + status: 'pending' + max_namespaces: 5 + created_at: '2025-01-15T10:00:00.000Z' + last_login: '2025-01-20T15:30:00.000Z' + name: 'testuser' + 
username: 'testuser' + email: 'test@example.com' + recovery_email: 'recovery@example.com' + mfa: + enabled: true + namespacesOwned: 2 + preferences: + auth_methods: + - 'local' + - 'saml' + '401': + $ref: ../components/responses/401.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/admin@api@users@{id}@password@reset.yaml b/openapi/spec/paths/admin@api@users@{id}@password@reset.yaml new file mode 100644 index 00000000000..ec993c0ffb6 --- /dev/null +++ b/openapi/spec/paths/admin@api@users@{id}@password@reset.yaml @@ -0,0 +1,52 @@ +patch: + operationId: adminResetUserPassword + summary: Reset user password + description: | + Resets the password for a specified user. It's particularly useful for + users who initially authenticated via SAML and therefore may not have a + password set up. This enables them to maintain access even if their + original authentication method becomes unavailable (e.g., if SAML + authentication is disabled). + + The endpoint generates a secure 16-character random password that includes: + - Uppercase letters + - Lowercase letters + - Numbers + - Special characters + + Users are strongly encouraged to change this temporary password after their + first successful authentication. If the user already has a password, a `400 + Bad Request` status code will be returned. 
+ tags: + - admin + - enterprise + - users + security: + - jwt: [] + parameters: + - name: id + description: The ID of the user whose password needs to be reset + schema: + type: string + required: true + in: path + responses: + '200': + description: Password successfully reset + content: + application/json: + schema: + type: object + properties: + password: + type: string + description: The newly generated password + example: gpUBz14a%ZLvmDJC + '400': + $ref: ../components/responses/invalidFields.yaml + '401': + $ref: ../components/responses/401.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@announcements.yaml b/openapi/spec/paths/api@announcements.yaml new file mode 100644 index 00000000000..fdcf5b98ab1 --- /dev/null +++ b/openapi/spec/paths/api@announcements.yaml @@ -0,0 +1,45 @@ +get: + operationId: listAnnouncements + summary: List announcements + description: List the announcements posted by ShellHub Cloud. + tags: + - announcements + - community + servers: + - url: https://cloud.shellhub.io + description: ShellHub Cloud API server + security: [] + parameters: + - $ref: ../components/parameters/query/pageQuery.yaml + - $ref: ../components/parameters/query/perPageQuery.yaml + - name: order_by + schema: + description: Announcements' list order. + type: string + enum: + - asc + - desc + example: asc + default: desc + required: false + in: query + responses: + '200': + description: Success to get the announcements. + headers: + X-Total-Count: + description: Announcements' total number. 
+        schema:
+          type: string
+          minimum: 0
+          readOnly: true
+      content:
+        application/json:
+          schema:
+            type: array
+            items:
+              $ref: ../components/schemas/announcementShort.yaml
+    '400':
+      $ref: ../components/responses/400.yaml
+    '500':
+      $ref: ../components/responses/500.yaml
diff --git a/openapi/spec/paths/api@announcements@{uuid}.yaml b/openapi/spec/paths/api@announcements@{uuid}.yaml
new file mode 100644
index 00000000000..d74f717894d
--- /dev/null
+++ b/openapi/spec/paths/api@announcements@{uuid}.yaml
@@ -0,0 +1,24 @@
+get:
+  operationId: getAnnouncement
+  summary: Get an announcement
+  description: Get an announcement.
+  tags:
+    - announcements
+    - community
+  servers:
+    - url: https://cloud.shellhub.io
+      description: ShellHub Cloud API server
+  security: []
+  parameters:
+    - $ref: ../components/parameters/path/announcementUUID.yaml
+  responses:
+    '200':
+      description: Success to get an announcement.
+      content:
+        application/json:
+          schema:
+            $ref: ../components/schemas/announcement.yaml
+    '404':
+      $ref: ../components/responses/404.yaml
+    '500':
+      $ref: ../components/responses/500.yaml
diff --git a/openapi/spec/paths/api@auth@device.yaml b/openapi/spec/paths/api@auth@device.yaml
new file mode 100644
index 00000000000..c7f20ef5782
--- /dev/null
+++ b/openapi/spec/paths/api@auth@device.yaml
@@ -0,0 +1,73 @@
+post:
+  operationId: authDeviceV2
+  summary: Auth device v2
+  description: |
+    Auth a device.
+
+    Every 30 seconds, this route is hit by internal ShellHub to check device
+    availability.
+  tags:
+    - device
+  security:
+    - jwt: []
+    - api-key: []
+  parameters:
+    - name: X-Real-IP
+      schema:
+        description: |
+          Device's IP address.
+
+
+          `X-Real-IP` header is used to set a geolocation position to device when
+          `geoip` feature is enabled.
+        type: string
+        pattern: ^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$
+        example: 127.0.0.1
+      in: header
+  requestBody:
+    content:
+      application/json:
+        schema:
+          type: object
+          properties:
+            uid:
+              $ref: ../components/schemas/deviceUID.yaml
+            token:
+              $ref: ../components/schemas/deviceToken.yaml
+            name:
+              $ref: ../components/schemas/deviceName.yaml
+            namespace:
+              $ref: ../components/schemas/deviceNamespaceName.yaml
+          required:
+            - uid
+            - token
+            - name
+            - namespace
+  responses:
+    '200':
+      description: Success to auth device
+      content:
+        application/json:
+          schema:
+            type: object
+            properties:
+              UID:
+                description: Device UID
+                type: string
+              Token:
+                description: Device token
+                type: string
+              Name:
+                description: Device name
+                type: string
+              Namespace:
+                description: Device owner namespace
+                type: string
+    '400':
+      $ref: ../components/responses/400.yaml
+    '401':
+      $ref: ../components/responses/401.yaml
+    '404':
+      $ref: ../components/responses/404.yaml
+    '500':
+      $ref: ../components/responses/500.yaml
diff --git a/openapi/spec/paths/api@auth@ssh.yaml b/openapi/spec/paths/api@auth@ssh.yaml
new file mode 100644
index 00000000000..d1428b9e16f
--- /dev/null
+++ b/openapi/spec/paths/api@auth@ssh.yaml
@@ -0,0 +1,43 @@
+post:
+  operationId: authSSHPublicKey
+  summary: Auth SSH public key
+  description: Authenticate an SSH public key to ShellHub server.
+  security:
+    - jwt: []
+    - api-key: []
+  tags:
+    - community
+    - internal
+    - ssh
+  requestBody:
+    content:
+      application/json:
+        schema:
+          type: object
+          properties:
+            fingerprint:
+              $ref: ../components/schemas/publickKeyFingerprint.yaml
+            data:
+              description: Public key's data in PEM format.
+              type: string
+              format: byte
+          required:
+            - fingerprint
+            - data
+  responses:
+    '200':
+      description: Success to auth a SSH public key.
+      content:
+        application/json:
+          schema:
+            type: object
+            properties:
+              signature:
+                description: Device's signature.
+ type: string + '401': + $ref: ../components/responses/401.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@auth@token@{tenant}.yaml b/openapi/spec/paths/api@auth@token@{tenant}.yaml new file mode 100644 index 00000000000..eeaab25ef79 --- /dev/null +++ b/openapi/spec/paths/api@auth@token@{tenant}.yaml @@ -0,0 +1,34 @@ +get: + operationId: getNamespaceToken + summary: Get a new namespace's token + description: "This route works like a login's one; returns a JWT token and extra + + information about namespace. + + + You can use this route to swap between namespaces. + + " + tags: + - community + - namespaces + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/path/namespaceTenantIDPath.yaml + responses: + '200': + description: Success to get namespace's token + content: + application/json: + schema: + $ref: ../components/schemas/userAuth.yaml + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@auth@user.yaml b/openapi/spec/paths/api@auth@user.yaml new file mode 100644 index 00000000000..daec1f617fe --- /dev/null +++ b/openapi/spec/paths/api@auth@user.yaml @@ -0,0 +1,61 @@ +post: + operationId: authUser + summary: Auth a user + description: Authenticate a user, returning the session's JWT token and data about the user. + tags: + - community + - external + - users + security: [] + requestBody: + content: + application/json: + schema: + type: object + properties: + username: + $ref: ../components/schemas/userUsername.yaml + password: + $ref: ../components/schemas/userPassword.yaml + required: + - username + - password + responses: + '200': + description: Success to auth the user. 
+ content: + application/json: + schema: + $ref: ../components/schemas/userAuth.yaml + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml +get: + operationId: getUserInfo + summary: Get user info + tags: + - community + - users + security: + - jwt: [] + - api-key: [] + responses: + '200': + description: Success to get the user info. + content: + application/json: + schema: + $ref: ../components/schemas/userAuth.yaml + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@billing@customer.yaml b/openapi/spec/paths/api@billing@customer.yaml new file mode 100644 index 00000000000..d702ec88bda --- /dev/null +++ b/openapi/spec/paths/api@billing@customer.yaml @@ -0,0 +1,102 @@ +post: + operationId: createCustomer + summary: Create customer + description: creates a new customer defining, optionaly, the default payment method. + security: + - jwt: [] + - api-key: [] + tags: + - cloud + - billing + responses: + '200': + description: Success to create a new customer. + '400': + $ref: ../components/schemas/billingError.yaml + '401': + $ref: ../components/schemas/billingError.yaml + '403': + $ref: ../components/schemas/billingError.yaml + '404': + $ref: ../components/schemas/billingError.yaml + '409': + $ref: ../components/schemas/billingError.yaml + '424': + $ref: ../components/schemas/billingError.yaml + '500': + $ref: ../components/responses/500.yaml +get: + operationId: getCustomer + summary: Get Customer + description: Get the customer. + security: + - jwt: [] + - api-key: [] + tags: + - cloud + - billing + responses: + '200': + description: Success to get a customer. 
+ content: + application/json: + schema: + type: object + properties: + id: + description: Customer's ID. + type: string + example: 'cus_H9J5n2eZvKYlo2C7X1QX2Qg' + name: + description: Customer's name. + type: string + example: 'user' + email: + description: Customer's e-mail. + type: string + format: email + example: 'user@shellhub.io' + payment_methods: + description: Customer's payment methods. + type: array + items: + type: object + properties: + id: + description: Payment method's ID. + type: string + example: 'pm_1H9J5n2eZvKYlo2C7X1QX2Qg' + number: + description: Payment method card's number. + type: string + example: '4242424242424242' + brand: + description: Payment method card's brand. + type: string + example: 'visa' + exp_month: + description: Payment method card's expiration month. + type: integer + example: 10 + exp_year: + description: Payment method card's expiration year. + type: integer + example: 2030 + cvc: + description: Payment method card's CVC. + type: string + example: '123' + default: + description: Payment method default status. + type: boolean + example: true + '400': + $ref: ../components/schemas/billingError.yaml + '403': + $ref: ../components/schemas/billingError.yaml + '404': + $ref: ../components/schemas/billingError.yaml + '424': + $ref: ../components/schemas/billingError.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@billing@device-choice.yaml b/openapi/spec/paths/api@billing@device-choice.yaml new file mode 100644 index 00000000000..b210328a5e2 --- /dev/null +++ b/openapi/spec/paths/api@billing@device-choice.yaml @@ -0,0 +1,36 @@ +post: + operationId: choiceDevices + summary: Choice devices + description: Choice devices when device's limit is rechead. + security: + - jwt: [] + - api-key: [] + tags: + - cloud + + - billing + requestBody: + content: + application/json: + schema: + type: object + properties: + choices: + description: Device's list. 
+ type: array + items: + description: Device's ID. + type: string + minItems: 0 + maxItems: 3 + required: + - choices + responses: + '200': + description: Success to choice devices. + '400': + $ref: ../components/responses/400.yaml + '403': + $ref: ../components/responses/403.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@billing@devices-most-used.yaml b/openapi/spec/paths/api@billing@devices-most-used.yaml new file mode 100644 index 00000000000..1e69427b4f2 --- /dev/null +++ b/openapi/spec/paths/api@billing@devices-most-used.yaml @@ -0,0 +1,30 @@ +get: + operationId: getDevicesMostUsed + summary: Get devices most used + description: Get the most used devices. + security: + - jwt: [] + - api-key: [] + tags: + - cloud + - billing + responses: + '200': + description: Success to get the most used devices. + headers: + X-Total-Count: + description: Devices' total number. + schema: + type: string + minimum: 0 + readOnly: true + content: + application/json: + schema: + type: array + items: + $ref: ../components/schemas/device.yaml + '403': + $ref: ../components/responses/403.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@billing@evaluate.yaml b/openapi/spec/paths/api@billing@evaluate.yaml new file mode 100644 index 00000000000..1e6d29f32a0 --- /dev/null +++ b/openapi/spec/paths/api@billing@evaluate.yaml @@ -0,0 +1,63 @@ +post: + operationId: evaluate + summary: Evaluate + description: evaluate the namespace capabilities. + security: + - jwt: [] + - api-key: [] + tags: + - cloud + - billing + responses: + '200': + description: Success to evaluate the namespace. + content: + application/json: + schema: + type: object + properties: + can_accept: + description: Namespaces's acceptance status. + type: boolean + example: false + can_connect: + description: Namespace's connection status. + type: boolean + example: true + '400': + description: Bad Request. 
+      content:
+        application/json:
+          schema:
+            description: Error's message.
+            type: string
+    '401':
+      description: Unauthorized.
+      content:
+        application/json:
+          schema:
+            description: Error's message.
+            type: string
+    '403':
+      description: Forbidden.
+      content:
+        application/json:
+          schema:
+            description: Error's message.
+            type: string
+    '404':
+      description: Not found.
+      content:
+        application/json:
+          schema:
+            description: Error's message.
+            type: string
+    '424':
+      description: Failed dependency.
+      content:
+        application/json:
+          schema:
+            description: Error's message.
+            type: string
+    '500':
+      $ref: ../components/responses/500.yaml
diff --git a/openapi/spec/paths/api@billing@paymentmethod@attach.yaml b/openapi/spec/paths/api@billing@paymentmethod@attach.yaml
new file mode 100644
index 00000000000..ba9f4d4131f
--- /dev/null
+++ b/openapi/spec/paths/api@billing@paymentmethod@attach.yaml
@@ -0,0 +1,37 @@
+post:
+  operationId: attachPaymentMethod
+  summary: Attach payment method
+  description: Attaches a payment method to a customer.
+  security:
+    - jwt: []
+    - api-key: []
+  tags:
+    - cloud
+    - billing
+  requestBody:
+    content:
+      application/json:
+        schema:
+          type: object
+          properties:
+            id:
+              description: Payment method's ID.
+              type: string
+              example: 'pm_H9J5n2eZvKYlo2C7X1QX2Qg'
+          required:
+            - id
+  responses:
+    '200':
+      description: Success to attach the payment method to a customer.
+ '400': + $ref: ../components/schemas/billingError.yaml + '401': + $ref: ../components/schemas/billingError.yaml + '403': + $ref: ../components/schemas/billingError.yaml + '404': + $ref: ../components/schemas/billingError.yaml + '424': + $ref: ../components/schemas/billingError.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@billing@paymentmethod@default.yaml b/openapi/spec/paths/api@billing@paymentmethod@default.yaml new file mode 100644 index 00000000000..5c9fb33f891 --- /dev/null +++ b/openapi/spec/paths/api@billing@paymentmethod@default.yaml @@ -0,0 +1,37 @@ +post: + operationId: setDefaultPaymentMethod + summary: Set default payment method + description: Set default payment method to the customer. + security: + - jwt: [] + - api-key: [] + tags: + - cloud + - billing + requestBody: + content: + application/json: + schema: + type: object + properties: + id: + description: Payment method's ID. + type: string + example: 'pm_H9J5n2eZvKYlo2C7X1QX2Qg' + required: + - id + responses: + '200': + description: Success to set default payment method. + '400': + $ref: ../components/schemas/billingError.yaml + '401': + $ref: ../components/schemas/billingError.yaml + '403': + $ref: ../components/schemas/billingError.yaml + '404': + $ref: ../components/schemas/billingError.yaml + '424': + $ref: ../components/schemas/billingError.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@billing@paymentmethod@detach.yaml b/openapi/spec/paths/api@billing@paymentmethod@detach.yaml new file mode 100644 index 00000000000..3f67ad72c1f --- /dev/null +++ b/openapi/spec/paths/api@billing@paymentmethod@detach.yaml @@ -0,0 +1,37 @@ +post: + operationId: detachPaymentMethod + summary: Detach payment method + description: Detachs a payment method from a customer. 
+ security: + - jwt: [] + - api-key: [] + tags: + - cloud + - billing + requestBody: + content: + application/json: + schema: + type: object + properties: + id: + description: Payment method's ID. + type: string + example: 'pm_H9J5n2eZvKYlo2C7X1QX2Qg' + required: + - id + responses: + '200': + description: Success to detach the payment method from a customer. + '400': + $ref: ../components/schemas/billingError.yaml + '401': + $ref: ../components/schemas/billingError.yaml + '403': + $ref: ../components/schemas/billingError.yaml + '404': + $ref: ../components/schemas/billingError.yaml + '424': + $ref: ../components/schemas/billingError.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@billing@report.yaml b/openapi/spec/paths/api@billing@report.yaml new file mode 100644 index 00000000000..22b9c8a334a --- /dev/null +++ b/openapi/spec/paths/api@billing@report.yaml @@ -0,0 +1,64 @@ +post: + operationId: report + summary: Report + description: Report an action. + security: + - jwt: [] + - api-key: [] + tags: + - cloud + - billing + parameters: + - name: action + schema: + type: string + enum: ['device_accept', 'namespace_delete'] + required: true + in: query + responses: + '200': + description: Success to report a action. + '400': + description: Bad Request. + content: + application/json: + schema: + description: Error's message. + type: string + '401': + description: Unauthorized. + content: + application/json: + schema: + description: Error's message. + type: string + '402': + description: Payment Required. + content: + application/json: + schema: + description: Error's message. + type: string + '403': + description: Forbidden. + content: + application/json: + schema: + description: Error's message. + type: string + '404': + description: Not found. + content: + application/json: + schema: + description: Error's message. + type: string + '424': + description: Failed dependency. 
+ content: + application/json: + schema: + description: Error's message. + type: string + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@billing@subscription.yaml b/openapi/spec/paths/api@billing@subscription.yaml new file mode 100644 index 00000000000..f13a18beec0 --- /dev/null +++ b/openapi/spec/paths/api@billing@subscription.yaml @@ -0,0 +1,109 @@ +post: + operationId: createSubscription + summary: Create subscription + description: Create a subscription. + security: + - jwt: [] + - api-key: [] + tags: + - cloud + - billing + responses: + '200': + description: Success to create a new subscription. + '400': + $ref: ../components/schemas/billingError.yaml + '402': + $ref: ../components/schemas/billingError.yaml + '403': + $ref: ../components/schemas/billingError.yaml + '404': + $ref: ../components/schemas/billingError.yaml + '409': + $ref: ../components/schemas/billingError.yaml + '424': + $ref: ../components/schemas/billingError.yaml + '500': + $ref: ../components/responses/500.yaml +get: + operationId: getSubscription + summary: Get subscription + description: Get the subscription. + security: + - jwt: [] + - api-key: [] + tags: + - cloud + - billing + responses: + '200': + description: Success to get a subscription. + content: + application/json: + schema: + type: object + properties: + id: + description: Subscription's ID. + type: string + example: 'sub_H9J5n2eZvKYlo2C7X1QX2Qg' + active: + description: Subscription's active. + type: boolean + example: true + status: + description: Subscription's status. + type: string + enum: + [ + 'inactive', + 'active', + 'trialing', + 'incomplete', + 'incomplete_expired', + 'past_due', + 'canceled', + 'unpaid', + 'paused', + 'to_cancel_at_end_of_period', + ] + example: 'active' + end_at: + description: Subscription's current period end. + type: integer + example: 31536000 + invoices: + description: Subscription's invoices. 
+ type: array + items: + type: object + properties: + id: + description: Invoice's ID. + type: string + example: 'in_H9J5n2eZvKYlo2C7X1QX2Qg' + status: + description: Invoice's status. + type: string + enum: ['draft', 'open', 'paid', 'uncollectible', 'voided'] + example: 'open' + currency: + description: Invoice's currency. + type: string + enum: ['usd', 'brl'] + example: 'usd' + amount: + description: Invoice's amount in cents. + type: integer + example: 0 + + '402': + $ref: ../components/schemas/billingError.yaml + '403': + $ref: ../components/schemas/billingError.yaml + '404': + $ref: ../components/schemas/billingError.yaml + '424': + $ref: ../components/schemas/billingError.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@billing@{id}@payment-method.yaml b/openapi/spec/paths/api@billing@{id}@payment-method.yaml new file mode 100644 index 00000000000..abc81f220ef --- /dev/null +++ b/openapi/spec/paths/api@billing@{id}@payment-method.yaml @@ -0,0 +1,147 @@ +parameters: + - name: id + description: Payment method ID + schema: + type: string + required: true + in: path +post: + operationId: addPaymentMethod + summary: Add payment method + description: Add a new payment method. + security: + - jwt: [] + - api-key: [] + tags: + - cloud + - billing + responses: + '200': + description: Success add a new payment method. + content: + application/json: + schema: + type: object + properties: + id: + description: Payment method's ID. + type: string + '400': + description: Bad request. + content: + application/json: + schema: + description: Error's message. + type: string + '403': + description: |- + Forbidden. + + - Invalid length + - Unathorized + content: + application/json: + schema: + description: Error's message. + type: string + '404': + description: |- + Not found. + + - Billing not found + - Payment method not found + content: + application/json: + schema: + description: Error's message. 
+ type: string + '423': + description: Locked. + content: + application/json: + schema: + description: Error's message. + type: string + '500': + $ref: ../components/responses/500.yaml +patch: + operationId: updatePaymentMethod + summary: Update payment method + description: Update the payment method. + security: + - jwt: [] + - api-key: [] + tags: + - cloud + - billing + responses: + '200': + description: Success to update the payment method. + content: + application/json: + schema: + type: object + properties: + id: + description: Payment method's ID. + type: string + '400': + description: Bad request. + content: + application/json: + schema: + description: Error's message. + type: string + '403': + description: Forbidden. + content: + application/json: + schema: + description: Error's message. + type: string + '404': + description: Not found. + content: + application/json: + schema: + description: Error's message. + type: string + '500': + $ref: ../components/responses/500.yaml +delete: + operationId: deattachPaymentMethod + summary: Deattach payment method + description: Deattach a payment method. + security: + - jwt: [] + - api-key: [] + tags: + - cloud + - billing + responses: + '200': + description: Success to deattach the payment method. + content: + application/json: + schema: + type: object + properties: + id: + description: Payment method's ID. + type: string + '403': + description: Forbidden. + content: + application/json: + schema: + description: Error's message. + type: string + '404': + description: Not found. + content: + application/json: + schema: + description: Error's message. 
+ type: string + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@containers.yaml b/openapi/spec/paths/api@containers.yaml new file mode 100644 index 00000000000..a3c6917e799 --- /dev/null +++ b/openapi/spec/paths/api@containers.yaml @@ -0,0 +1,61 @@ +get: + operationId: getContainers + summary: Get containers + description: Get a list of containers. + tags: + - community + - containers + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/query/filterQuery.yaml + - $ref: ../components/parameters/query/pageQuery.yaml + - $ref: ../components/parameters/query/perPageQuery.yaml + - name: status + description: Container's status + schema: + allOf: + - $ref: ../components/schemas/deviceStatus.yaml + default: accepted + required: false + in: query + - name: sort_by + description: Container's property to sort of + schema: + type: string + example: name + default: last_seen + required: false + in: query + - name: order_by + schema: + description: Container's list order + type: string + enum: + - asc + - desc + example: asc + default: desc + required: false + in: query + responses: + '200': + description: Success to get a list of containers. + headers: + X-Total-Count: + description: Containers' total number. + schema: + type: string + minimum: 0 + readOnly: true + content: + application/json: + schema: + type: array + items: + $ref: ../components/schemas/device.yaml + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@containers@{uid}.yaml b/openapi/spec/paths/api@containers@{uid}.yaml new file mode 100644 index 00000000000..66290402da9 --- /dev/null +++ b/openapi/spec/paths/api@containers@{uid}.yaml @@ -0,0 +1,79 @@ +parameters: + - $ref: ../components/parameters/path/deviceUIDPath.yaml +get: + operationId: getContainer + summary: Get container + description: Get a container. 
+ tags: + - community + - containers + security: + - jwt: [] + - api-key: [] + responses: + '200': + description: Success get a container. + content: + application/json: + schema: + $ref: ../components/schemas/device.yaml + '401': + $ref: ../components/responses/401.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml +delete: + operationId: deleteContainer + summary: Delete container + description: Delete a container. + tags: + - community + - containers + security: + - jwt: [] + - api-key: [] + responses: + '200': + description: Success to delete a container. + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '500': + $ref: ../components/responses/500.yaml +put: + operationId: updateContainer + summary: Update container + description: Update container's data. + tags: + - community + - containers + security: + - jwt: [] + - api-key: [] + requestBody: + content: + application/json: + schema: + type: object + properties: + name: + $ref: ../components/schemas/deviceName.yaml + public_url: + $ref: ../components/schemas/devicePublicURL.yaml + responses: + '200': + description: Success to update container's data. + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '409': + $ref: ../components/responses/409.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@containers@{uid}@{status}.yaml b/openapi/spec/paths/api@containers@{uid}@{status}.yaml new file mode 100644 index 00000000000..d7f6077ff4b --- /dev/null +++ b/openapi/spec/paths/api@containers@{uid}@{status}.yaml @@ -0,0 +1,37 @@ +patch: + operationId: updateContainerStatus + summary: Update container status + description: Update container's status. 
+ tags: + - community + - containers + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/path/deviceUIDPath.yaml + - name: status + description: Container's status + schema: + type: string + enum: + - accept + - reject + - pending + - unused + example: accept + required: true + in: path + responses: + '200': + description: Success to update container status. + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '402': + $ref: ../components/responses/402.yaml + '403': + $ref: ../components/responses/403.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@devices.yaml b/openapi/spec/paths/api@devices.yaml new file mode 100644 index 00000000000..6f5b23f7c46 --- /dev/null +++ b/openapi/spec/paths/api@devices.yaml @@ -0,0 +1,61 @@ +get: + operationId: getDevices + summary: Get devices + description: Get a list of devices. + tags: + - community + - devices + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/query/filterQuery.yaml + - $ref: ../components/parameters/query/pageQuery.yaml + - $ref: ../components/parameters/query/perPageQuery.yaml + - name: status + description: Device's status + schema: + allOf: + - $ref: ../components/schemas/deviceStatus.yaml + default: accepted + required: false + in: query + - name: sort_by + description: Device's property to sort of + schema: + type: string + example: name + default: last_seen + required: false + in: query + - name: order_by + schema: + description: Device's list order + type: string + enum: + - asc + - desc + example: asc + default: desc + required: false + in: query + responses: + '200': + description: Success to get a list of devices. + headers: + X-Total-Count: + description: Devices' total number. 
+ schema: + type: string + minimum: 0 + readOnly: true + content: + application/json: + schema: + type: array + items: + $ref: ../components/schemas/device.yaml + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@devices@auth.yaml b/openapi/spec/paths/api@devices@auth.yaml new file mode 100644 index 00000000000..4013f22a46a --- /dev/null +++ b/openapi/spec/paths/api@devices@auth.yaml @@ -0,0 +1,81 @@ +post: + operationId: authDevice + summary: Auth device + description: | + Authenticate a ShellHub agent into the ShellHub server. + + Every 30 seconds, this route is hit by ShellHub agent to inform device availability. + tags: + - internal + - devices + security: + - jwt: [] + - api-key: [] + parameters: + - name: X-Real-IP + schema: + description: | + Device's IP address. + + + `X-Real-IP` header is used to set a geolocation position to device when + `geoip` feature is enable. + type: string + pattern: ^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ + example: 127.0.0.1 + in: header + requestBody: + content: + application/json: + schema: + type: object + properties: + info: + $ref: ../components/schemas/deviceInfo.yaml + sessions: + type: array + items: + # NOTICE: what is session? + type: string + hostname: + type: string + pattern: ^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])(\.([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9]))*$ + example: device-hostname + identity: + $ref: ../components/schemas/deviceIdentity.yaml + public_key: + description: Device's public key in PEM format. 
+ type: string + example: "-----BEGIN RSA PUBLIC KEY-----\nMIIBCgKCAQEA0vH2Bob3mn+uWVaHlOoZD8ai01W6VnRTnXlnHVF7Ny1Vb7pl1Hc4\nD8bsBhb1vt7aZOYHbCyDR2r5lsrWXCELE8pY8vzfFDA+jNrLbBCJ66E1BcmTqfXC\nJcLospWD2lIAwU2O7IPxwZujuVkHrF8nYuEFsKeG60QTWNS++RTqydqe2KmFMEdW\nCQmYPm/ykN871fSR9+PzoRJMYWidY6Szn+X2ardGmS/Ldhl/PEu9h7xjcQXANWz6\nyV/RVReGVkLcK6TxlfuxgdpbsWAx+cS52P7xWrshNefHqjpdlm3KNbo6vqfTpU8L\nd/FFISXXaa1Md5GyAHF+jzuRzQ5z5aKBGwIDAQAB\n-----END RSA PUBLIC KEY-----" + tenant_id: + $ref: ../components/schemas/namespaceTenantID.yaml + required: + - info + - sessions + - hostname + - public_key + - tenant_id + responses: + '200': + description: Success to auth device. + content: + application/json: + schema: + type: object + properties: + uid: + $ref: ../components/schemas/deviceUID.yaml + token: + $ref: ../components/schemas/deviceToken.yaml + name: + $ref: ../components/schemas/deviceName.yaml + namespace: + $ref: ../components/schemas/deviceNamespaceName.yaml + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@devices@resolve.yaml b/openapi/spec/paths/api@devices@resolve.yaml new file mode 100644 index 00000000000..17493e1cfe1 --- /dev/null +++ b/openapi/spec/paths/api@devices@resolve.yaml @@ -0,0 +1,50 @@ +get: + operationId: resolveDevice + summary: Resolve Device + description: | + Retrieve a device using flexible resolution methods. The device can be + identified by either its unique identifier (UID) or hostname. The endpoint + automatically scopes results to the authenticated tenant's namespace for + security isolation. + + When both UID and hostname are provided, UID takes precedence over hostname. 
+ tags: + - community + - devices + security: + - jwt: [] + - api-key: [] + parameters: + - name: hostname + in: query + required: false + description: The hostname of the device + schema: + type: string + - name: uid + in: query + required: false + description: The UID of the device + schema: + type: string + responses: + '200': + description: Device successfully retrieved + content: + application/json: + schema: + $ref: ../components/schemas/device.yaml + '401': + $ref: ../components/responses/401.yaml + '404': + description: Device not found with the specified resolver + content: + application/json: + schema: + type: object + properties: + message: + type: string + example: 'Device not found' + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@devices@{uid}.yaml b/openapi/spec/paths/api@devices@{uid}.yaml new file mode 100644 index 00000000000..b76ecf3ee77 --- /dev/null +++ b/openapi/spec/paths/api@devices@{uid}.yaml @@ -0,0 +1,79 @@ +parameters: + - $ref: ../components/parameters/path/deviceUIDPath.yaml +get: + operationId: getDevice + summary: Get device + description: Get a device. + tags: + - community + - devices + security: + - jwt: [] + - api-key: [] + responses: + '200': + description: Success get a device. + content: + application/json: + schema: + $ref: ../components/schemas/device.yaml + '401': + $ref: ../components/responses/401.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml +delete: + operationId: deleteDevice + summary: Delete device + description: Delete a device. + tags: + - community + - devices + security: + - jwt: [] + - api-key: [] + responses: + '200': + description: Success to delete a device. + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '500': + $ref: ../components/responses/500.yaml +put: + operationId: updateDevice + summary: Update device + description: Update device's data. 
+ tags: + - community + - devices + security: + - jwt: [] + - api-key: [] + requestBody: + content: + application/json: + schema: + type: object + properties: + name: + $ref: ../components/schemas/deviceName.yaml + public_url: + $ref: ../components/schemas/devicePublicURL.yaml + responses: + '200': + description: Success to update device's data. + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '409': + $ref: ../components/responses/409.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@devices@{uid}@accept.yaml b/openapi/spec/paths/api@devices@{uid}@accept.yaml new file mode 100644 index 00000000000..3ed35204283 --- /dev/null +++ b/openapi/spec/paths/api@devices@{uid}@accept.yaml @@ -0,0 +1,21 @@ +patch: + operationId: acceptDevice + summary: Accept device + description: Change device status to `accepted`. + tags: + - community + - devices + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/path/deviceUIDPath.yaml + responses: + '200': + description: Success to accept the device + '401': + $ref: ../components/responses/401.yaml + '402': + $ref: ../components/responses/402.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@devices@{uid}@offline.yaml b/openapi/spec/paths/api@devices@{uid}@offline.yaml new file mode 100644 index 00000000000..943c2ab9a6b --- /dev/null +++ b/openapi/spec/paths/api@devices@{uid}@offline.yaml @@ -0,0 +1,26 @@ +post: + operationId: updateDeviceStatusOffline + summary: Update device status to offline + description: Update device's status to offline. 
+ tags: + - community + - devices + - internal + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/path/deviceUIDPath.yaml + responses: + '200': + description: Success to update device status to offline. + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@devices@{uid}@tags@{name}.yaml b/openapi/spec/paths/api@devices@{uid}@tags@{name}.yaml new file mode 100644 index 00000000000..d93e3dc6dcb --- /dev/null +++ b/openapi/spec/paths/api@devices@{uid}@tags@{name}.yaml @@ -0,0 +1,66 @@ +post: + operationId: pushTagToDevice + summary: Associate a tag with a device + description: Associates a tag with a device in the authenticated namespace. + tags: + - community + - tags + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/path/deviceUIDPath.yaml + - name: name + description: Tag name to associate + schema: + type: string + pattern: ^[a-zA-Z0-9-_]+$ + example: prod + required: true + in: path + responses: + '204': + description: Tag successfully associated with device + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml +delete: + operationId: pullTagFromDevice + summary: Remove a tag from a device + description: Removes a tag from a device in the authenticated namespace. 
+ tags: + - community + - tags + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/path/deviceUIDPath.yaml + - name: name + description: Tag name to remove + schema: + type: string + pattern: ^[a-zA-Z0-9-_]+$ + example: prod + required: true + in: path + responses: + '204': + description: Tag successfully removed from device + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@devices@{uid}@tunnels.yaml b/openapi/spec/paths/api@devices@{uid}@tunnels.yaml new file mode 100644 index 00000000000..53a914062b1 --- /dev/null +++ b/openapi/spec/paths/api@devices@{uid}@tunnels.yaml @@ -0,0 +1,78 @@ +parameters: + - $ref: ../components/parameters/path/deviceUIDPath.yaml +get: + operationId: listTunnels + summary: List tunnels + description: List the tunnels per devices. + deprecated: true + tags: + - cloud + - devices + - tunnels + parameters: + - $ref: ../components/parameters/query/pageQuery.yaml + - $ref: ../components/parameters/query/perPageQuery.yaml + security: + - jwt: [] + - api-key: [] + responses: + '200': + description: Success to get the tunnels. + content: + application/json: + schema: + type: array + items: + $ref: ../components/schemas/tunnel.yaml + '400': + $ref: ../components/responses/400.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml +post: + operationId: createTunnel + summary: Create a tunnel + description: Creates a new tunnel for a device. 
+ deprecated: true + tags: + - cloud + - devices + - tunnels + security: + - jwt: [] + - api-key: [] + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + host: + $ref: ../components/schemas/tunnelHost.yaml + port: + $ref: ../components/schemas/tunnelPort.yaml + ttl: + $ref: ../components/schemas/tunnelTTL.yaml + required: + - host + - port + - ttl + responses: + '200': + description: Tunnel created successfully. + content: + application/json: + schema: + $ref: ../components/schemas/tunnel.yaml + '400': + $ref: ../components/responses/400.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@devices@{uid}@tunnels@{address}.yaml b/openapi/spec/paths/api@devices@{uid}@tunnels@{address}.yaml new file mode 100644 index 00000000000..e1aad28ec65 --- /dev/null +++ b/openapi/spec/paths/api@devices@{uid}@tunnels@{address}.yaml @@ -0,0 +1,26 @@ +parameters: + - $ref: ../components/parameters/path/deviceUIDPath.yaml + - $ref: ../components/parameters/path/tunnelAddressPath.yaml +delete: + operationId: deleteTunnel + summary: Delete a tunnel + description: Deletes a tunnel for a specific device and port. + deprecated: true + tags: + - cloud + - devices + - tunnels + security: + - jwt: [] + - api-key: [] + responses: + '200': + description: Tunnel deleted successfully. 
+ '400': + $ref: ../components/responses/400.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@devices@{uid}@{status}.yaml b/openapi/spec/paths/api@devices@{uid}@{status}.yaml new file mode 100644 index 00000000000..bed3b3bdebd --- /dev/null +++ b/openapi/spec/paths/api@devices@{uid}@{status}.yaml @@ -0,0 +1,37 @@ +patch: + operationId: updateDeviceStatus + summary: Update device status + description: Update device's status. + tags: + - community + - devices + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/path/deviceUIDPath.yaml + - name: status + description: Device's status + schema: + type: string + enum: + - accept + - reject + - pending + - unused + example: accept + required: true + in: path + responses: + '200': + description: Success to update device status. + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '402': + $ref: ../components/responses/402.yaml + '403': + $ref: ../components/responses/403.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@firewall@rules.yaml b/openapi/spec/paths/api@firewall@rules.yaml new file mode 100644 index 00000000000..8a58fcceea3 --- /dev/null +++ b/openapi/spec/paths/api@firewall@rules.yaml @@ -0,0 +1,69 @@ +post: + operationId: createFirewallRule + summary: Create firewall rule + description: Create a firewall rule. + tags: + - cloud + + - rules + security: + - jwt: [] + - api-key: [] + requestBody: + content: + application/json: + schema: + $ref: ../components/schemas/firewallRulesRequest.yaml + responses: + '200': + description: Success to create firewall rule. 
+ content: + application/json: + schema: + $ref: ../components/schemas/firewallRulesResponse.yaml + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml +get: + operationId: getFirewallRules + summary: Get firewall rules + description: Get a list of firewall rules. + tags: + - cloud + + - rules + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/query/pageQuery.yaml + - $ref: ../components/parameters/query/perPageQuery.yaml + responses: + '200': + description: Success to get firewall rules. + headers: + X-Total-Count: + description: Firewall rules' total number. + schema: + type: string + minimum: 0 + readOnly: true + content: + application/json: + schema: + type: array + items: + $ref: ../components/schemas/firewallRulesResponse.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@firewall@rules@{id}.yaml b/openapi/spec/paths/api@firewall@rules@{id}.yaml new file mode 100644 index 00000000000..5931266058a --- /dev/null +++ b/openapi/spec/paths/api@firewall@rules@{id}.yaml @@ -0,0 +1,79 @@ +parameters: + - name: id + schema: + description: Firewall rule's ID + type: string + in: path + required: true +get: + operationId: getFirewallRule + summary: Get firewall rule + description: Get a firewall rule. + tags: + - cloud + - rules + security: + - jwt: [] + - api-key: [] + responses: + '200': + description: Success to get firewall rule. 
+ content: + application/json: + schema: + $ref: ../components/schemas/firewallRulesResponse.yaml + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml +put: + operationId: updateFirewallRule + summary: Update firewall rule + description: Update a firewall rule. + tags: + - cloud + - rules + security: + - jwt: [] + - api-key: [] + requestBody: + content: + application/json: + schema: + $ref: ../components/schemas/firewallRulesRequest.yaml + responses: + '200': + description: Success to update firewall rule. + content: + application/json: + schema: + $ref: ../components/schemas/firewallRulesResponse.yaml + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml +delete: + operationId: deleteFirewallRule + summary: Delete firewall rule + description: Delete a firewall rule. + tags: + - cloud + - rules + security: + - jwt: [] + - api-key: [] + responses: + '200': + description: Success to delete a firewall rule. + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@info.yaml b/openapi/spec/paths/api@info.yaml new file mode 100644 index 00000000000..073d0aaacf4 --- /dev/null +++ b/openapi/spec/paths/api@info.yaml @@ -0,0 +1,26 @@ +get: + operationId: getInfo + summary: Get info + description: Get information about ShellHub instance like version, SSH and API addresses. + security: [] + tags: + - community + - system + parameters: + - name: agent_version + description: Agent's version. This parameter is used to filter instance information based on the requesting agent's version. + schema: + type: string + required: false + in: query + responses: + '200': + description: Success to get ShellHub instance info. 
+ content: + application/json: + schema: + $ref: ../components/schemas/info.yaml + '400': + $ref: ../components/responses/400.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@login.yaml b/openapi/spec/paths/api@login.yaml new file mode 100644 index 00000000000..93b0bbb0465 --- /dev/null +++ b/openapi/spec/paths/api@login.yaml @@ -0,0 +1,83 @@ +post: + operationId: login + summary: Login + description: | + Authenticate a "local" user by returning the session's JWT token and user + data. Local users are those registered via the ShellHub form without + relying on external Identity Providers (IdPs). + + Authentication may result in an account lockout after N consecutive + incorrect login attempts. The lockout applies specifically to a particular + source and user combination. Check for the presence of the + `X-Account-Lockout` header to determine the account lockout status. When + it's 0, there are no active lockouts. + + Users with MFA enabled cannot authenticate via this route. In such cases, + the API will respond with a status `401` and an `X-MFA-Token` header with a + UUID. Authentication must be made to `/api/mfa/auth` with this token in + these instances. + tags: + - community + - external + - users + security: [] + requestBody: + content: + application/json: + schema: + type: object + properties: + username: + $ref: ../components/schemas/userUsername.yaml + password: + $ref: ../components/schemas/userPassword.yaml + required: + - username + - password + responses: + '200': + description: Success to login. + headers: + X-Account-Lockout: + schema: + type: integer + description: Indicates the Unix timestamp in seconds of when the account lockout period ends. + example: 0 + X-MFA-Token: + schema: + type: string + description: Indicates whether the user has MFA enabled, which blocks authentication through this route. 
+ example: '' + content: + application/json: + schema: + $ref: ../components/schemas/userAuth.yaml + '400': + $ref: ../components/responses/400.yaml + '401': + description: Unauthorized + headers: + X-MFA-Token: + schema: + type: string + description: Indicates whether the user has MFA enabled, which blocks authentication through this route. + example: 'bf265bf8-0065-4f44-a3ac-55eb3134c6ec' + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '429': + description: Too Many Requests + headers: + X-Account-Lockout: + schema: + type: integer + description: Indicates the Unix timestamp in seconds of when the account lockout period ends. + example: 1711575401 + X-MFA-Token: + schema: + type: string + description: Indicates whether the user has MFA enabled, which blocks authentication through this route. + example: '' + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@namespaces.yaml b/openapi/spec/paths/api@namespaces.yaml new file mode 100644 index 00000000000..f1d39b933cf --- /dev/null +++ b/openapi/spec/paths/api@namespaces.yaml @@ -0,0 +1,101 @@ +get: + operationId: getNamespaces + summary: Get namespaces list + description: Returns a list of namespaces. + tags: + - community + - namespaces + security: + - jwt: [] + - api-key: [] + parameters: + - name: filter + description: | + Namespaces' filter. + + + Filter field receives a base64 encoded JSON object to limit a search. + The JSON object should have a property called `type`; it will filter by a `property` called `name` where the value should `contains` `examplespace`. 
+ + If you want to get only Namespaces named `examplespace`, the JSON object will look + like this + + ```json + [ + { + "type":"property", + "params":{ + "name":"name", + "operator":"contains", + "value":"examplespace" + } + } + ] + ``` + + So, the output encoded string will result in: + `W3sidHlwZSI6InByb3BlcnR5IiwicGFyYW1zIjp7Im5hbWUiOiJuYW1lIiwib3BlcmF0b3IiOiJjb250YWlucyIsInZhbHVlIjoiZXhhbXBsZXNwYWNlIn19XQ==` + schema: + type: string + pattern: ^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$ + example: W3sidHlwZSI6InByb3BlcnR5IiwicGFyYW1zIjp7Im5hbWUiOiJuYW1lIiwib3BlcmF0b3IiOiJjb250YWlucyIsInZhbHVlIjoiZXhhbXBsZXNwYWNlIn19XQ== + required: false + in: query + - $ref: ../components/parameters/query/pageQuery.yaml + - $ref: ../components/parameters/query/perPageQuery.yaml + responses: + '200': + description: Success to get a namespace list. + headers: + X-Total-Count: + description: Namespaces' total number. + schema: + type: string + minimum: 0 + content: + application/json: + schema: + type: array + items: + $ref: ../components/schemas/namespace.yaml + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml +post: + operationId: createNamespace + summary: Create namespace + description: Create a namespace. + tags: + - community + - namespaces + security: + - jwt: [] + - api-key: [] + requestBody: + content: + application/json: + schema: + type: object + properties: + name: + $ref: ../components/schemas/namespaceName.yaml + required: + - name + responses: + '200': + description: Success to create a namespace. 
+ content: + application/json: + schema: + $ref: ../components/schemas/namespace.yaml + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '409': + $ref: ../components/responses/409.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@namespaces@api-key.yaml b/openapi/spec/paths/api@namespaces@api-key.yaml new file mode 100644 index 00000000000..00d1fa4445c --- /dev/null +++ b/openapi/spec/paths/api@namespaces@api-key.yaml @@ -0,0 +1,89 @@ +post: + operationId: apiKeyCreate + summary: Creates an API key. + description: | + The `created_by`, `tenant_id`, and `role` (unless provided in the request + body) values will be obtained from the JWT token. + tags: + - api-keys + - community + - namespaces + security: + - jwt: [] + requestBody: + content: + application/json: + schema: + $ref: ../components/schemas/apiKeyCreate.yaml + responses: + '200': + description: Success. + content: + application/json: + schema: + $ref: ../components/schemas/apiKeyWithID.yaml + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '404': + $ref: ../components/responses/404.yaml + '409': + $ref: ../components/responses/409.yaml + '500': + $ref: ../components/responses/500.yaml +get: + operationId: apiKeyList + summary: List API Keys + tags: + - api-keys + - community + - namespaces + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/query/pageQuery.yaml + - $ref: ../components/parameters/query/perPageQuery.yaml + - name: order_by + schema: + description: The list order. + type: string + enum: + - asc + - desc + default: desc + required: false + in: query + - name: sort_by + description: The property to sort by. + schema: + type: string + example: name + default: expires_in + required: false + in: query + responses: + '200': + description: Success. 
+ headers: + X-Total-Count: + description: Total matched documents. + schema: + type: string + minimum: 0 + readOnly: true + content: + application/json: + schema: + type: array + items: + $ref: ../components/schemas/apiKey.yaml + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@namespaces@api-key@{key}.yaml b/openapi/spec/paths/api@namespaces@api-key@{key}.yaml new file mode 100644 index 00000000000..e03f492d4fe --- /dev/null +++ b/openapi/spec/paths/api@namespaces@api-key@{key}.yaml @@ -0,0 +1,63 @@ +patch: + summary: Update an API key + operationId: apiKeyUpdate + tags: + - api-keys + - community + - namespaces + security: + - jwt: [] + parameters: + - name: key + schema: + type: string + description: The API key name. + example: dev + required: true + in: path + requestBody: + content: + application/json: + schema: + $ref: ../components/schemas/apiKeyUpdate.yaml + responses: + '200': + description: Success + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '404': + $ref: ../components/responses/404.yaml + '409': + $ref: ../components/responses/409.yaml + '500': + $ref: ../components/responses/500.yaml +delete: + summary: Delete an API key + operationId: apiKeyDelete + tags: + - api-keys + - community + - namespaces + security: + - jwt: [] + parameters: + - name: key + schema: + type: string + description: The API key name. 
+ example: dev + required: true + in: path + responses: + '200': + description: Success + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@namespaces@{tenant}.yaml b/openapi/spec/paths/api@namespaces@{tenant}.yaml new file mode 100644 index 00000000000..7675d80dbf4 --- /dev/null +++ b/openapi/spec/paths/api@namespaces@{tenant}.yaml @@ -0,0 +1,87 @@ +parameters: + - $ref: ../components/parameters/path/namespaceTenantIDPath.yaml +get: + operationId: getNamespace + summary: Get a namespace + description: Get a namespace. + tags: + - community + - namespaces + security: + - jwt: [] + - api-key: [] + responses: + '200': + description: Success to get a namespace. + content: + application/json: + schema: + $ref: ../components/schemas/namespace.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml +put: + operationId: editNamespace + summary: Edit namespace + description: Edit a namespace. + tags: + - community + - namespaces + security: + - jwt: [] + - api-key: [] + requestBody: + content: + application/json: + schema: + type: object + properties: + name: + $ref: ../components/schemas/namespaceName.yaml + settings: + $ref: ../components/schemas/namespaceSettings.yaml + responses: + '200': + description: Success to edit a namespace. 
+ content: + application/json: + schema: + $ref: ../components/schemas/namespace.yaml + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '409': + $ref: ../components/responses/409.yaml + '500': + $ref: ../components/responses/500.yaml +delete: + operationId: deleteNamespace + summary: Delete namespace + description: Delete a namespace. + tags: + - community + - namespaces + security: + - jwt: [] + - api-key: [] + responses: + '200': + description: Success to delete a namespace. + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@namespaces@{tenant}@containers@{uid}@tags@{name}.yaml b/openapi/spec/paths/api@namespaces@{tenant}@containers@{uid}@tags@{name}.yaml new file mode 100644 index 00000000000..dfbb0c6dda5 --- /dev/null +++ b/openapi/spec/paths/api@namespaces@{tenant}@containers@{uid}@tags@{name}.yaml @@ -0,0 +1,66 @@ +post: + operationId: pushTagToContainer + summary: Associate a tag with a container + tags: + - community + - tags + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/path/namespaceTenantIDPath.yaml + - $ref: ../components/parameters/path/deviceUIDPath.yaml + - name: name + description: Tag name to associate + schema: + type: string + pattern: ^[a-zA-Z0-9-_]+$ + example: prod + required: true + in: path + responses: + '204': + description: Tag successfully associated with container + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml +delete: + operationId: pullTagFromContainer + summary: 
Remove a tag from a container + tags: + - community + - tags + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/path/namespaceTenantIDPath.yaml + - $ref: ../components/parameters/path/deviceUIDPath.yaml + - name: name + description: Tag name to remove + schema: + type: string + pattern: ^[a-zA-Z0-9-_]+$ + example: prod + required: true + in: path + responses: + '204': + description: Tag successfully removed from container + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@namespaces@{tenant}@devices@{uid}@tags@{name}.yaml b/openapi/spec/paths/api@namespaces@{tenant}@devices@{uid}@tags@{name}.yaml new file mode 100644 index 00000000000..270c66d5ac5 --- /dev/null +++ b/openapi/spec/paths/api@namespaces@{tenant}@devices@{uid}@tags@{name}.yaml @@ -0,0 +1,70 @@ +post: + operationId: pushTagToDeviceDeprecated + deprecated: true + summary: Associate a tag with a device + description: '**Deprecated**: Use `POST /api/devices/{uid}/tags/{name}` instead.' 
+ tags: + - community + - tags + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/path/namespaceTenantIDPath.yaml + - $ref: ../components/parameters/path/deviceUIDPath.yaml + - name: name + description: Tag name to associate + schema: + type: string + pattern: ^[a-zA-Z0-9-_]+$ + example: prod + required: true + in: path + responses: + '204': + description: Tag successfully associated with device + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml +delete: + operationId: pullTagFromDeviceDeprecated + deprecated: true + summary: Remove a tag from a device + description: '**Deprecated**: Use `DELETE /api/devices/{uid}/tags/{name}` instead.' + tags: + - community + - tags + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/path/namespaceTenantIDPath.yaml + - $ref: ../components/parameters/path/deviceUIDPath.yaml + - name: name + description: Tag name to remove + schema: + type: string + pattern: ^[a-zA-Z0-9-_]+$ + example: prod + required: true + in: path + responses: + '204': + description: Tag successfully removed from device + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@namespaces@{tenant}@invitations.yaml b/openapi/spec/paths/api@namespaces@{tenant}@invitations.yaml new file mode 100644 index 00000000000..b626cc66b62 --- /dev/null +++ b/openapi/spec/paths/api@namespaces@{tenant}@invitations.yaml @@ -0,0 +1,48 @@ +get: + operationId: getNamespaceMembershipInvitationList + summary: Get membership invitations for a namespace + description: | + Returns a paginated 
list of membership invitations for the specified namespace. + This endpoint allows namespace administrators to view all pending invitations. + tags: + - cloud + - members + - namespaces + security: + - jwt: [] + parameters: + - $ref: ../components/parameters/path/namespaceTenantIDPath.yaml + - name: filter + description: | + Membership invitations filter. + + Filter field receives a base64 encoded JSON object to limit the search. + schema: + type: string + required: false + in: query + - $ref: ../components/parameters/query/pageQuery.yaml + - $ref: ../components/parameters/query/perPageQuery.yaml + responses: + '200': + description: Successfully retrieved namespace membership invitations list. + headers: + X-Total-Count: + description: Total number of membership invitations. + schema: + type: integer + minimum: 0 + content: + application/json: + schema: + type: array + items: + $ref: ../components/schemas/membershipInvitation.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@namespaces@{tenant}@invitations@accept.yaml b/openapi/spec/paths/api@namespaces@{tenant}@invitations@accept.yaml new file mode 100644 index 00000000000..abb9c12a51d --- /dev/null +++ b/openapi/spec/paths/api@namespaces@{tenant}@invitations@accept.yaml @@ -0,0 +1,28 @@ +patch: + operationId: acceptInvite + summary: Accept a membership invite + description: | + Accepts a pending membership invitation for the authenticated user. + The user must be logged into the account that was invited. 
+ tags: + - cloud + - members + - namespaces + - users + security: + - jwt: [] + parameters: + - $ref: ../components/parameters/path/namespaceTenantIDPath.yaml + responses: + '200': + description: Invitation successfully accepted + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@namespaces@{tenant}@invitations@decline.yaml b/openapi/spec/paths/api@namespaces@{tenant}@invitations@decline.yaml new file mode 100644 index 00000000000..bbf7a524dda --- /dev/null +++ b/openapi/spec/paths/api@namespaces@{tenant}@invitations@decline.yaml @@ -0,0 +1,29 @@ +patch: + operationId: declineInvite + summary: Decline a membership invite + description: | + Declines a pending membership invitation for the authenticated user. + The user must be logged into the account that was invited. + The invitation status will be updated to "rejected". 
+ tags: + - cloud + - members + - namespaces + - users + security: + - jwt: [] + parameters: + - $ref: ../components/parameters/path/namespaceTenantIDPath.yaml + responses: + '200': + description: Invitation successfully declined + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@namespaces@{tenant}@invitations@links.yaml b/openapi/spec/paths/api@namespaces@{tenant}@invitations@links.yaml new file mode 100644 index 00000000000..f5c2146294f --- /dev/null +++ b/openapi/spec/paths/api@namespaces@{tenant}@invitations@links.yaml @@ -0,0 +1,58 @@ +post: + operationId: generateInvitationLink + summary: Generate an invitation link for a namespace member + description: | + Generates a unique invitation link to invite a member to a namespace using + their email. Each invitation link is unique and tied to the provided email. + Upon accepting the invitation, the user's status will automatically be set + to `accepted`. If the user associated with the email does not exist, the + invitation link will redirect them to the signup page. + + The invitation remains valid for **7 days**. + tags: + - cloud + - members + - namespaces + security: + - jwt: [] + parameters: + - $ref: ../components/parameters/path/namespaceTenantIDPath.yaml + requestBody: + content: + application/json: + schema: + type: object + properties: + email: + description: The email of the member. + type: string + example: john.doe@test.com + role: + $ref: ../components/schemas/namespaceMemberRole.yaml + required: + - email + - role + responses: + '200': + description: Success to generate a invitation link. + content: + application/json: + schema: + type: object + properties: + link: + description: The invitation link. 
+ type: string + example: https://cloud.shellhub.io/accept-invite?query=... + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '404': + $ref: ../components/responses/404.yaml + '403': + $ref: ../components/responses/403.yaml + '409': + $ref: ../components/responses/409.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@namespaces@{tenant}@invitations@{user-id}.yaml b/openapi/spec/paths/api@namespaces@{tenant}@invitations@{user-id}.yaml new file mode 100644 index 00000000000..f1ba34f343b --- /dev/null +++ b/openapi/spec/paths/api@namespaces@{tenant}@invitations@{user-id}.yaml @@ -0,0 +1,77 @@ +patch: + operationId: updateMembershipInvitation + summary: Update a pending membership invitation + description: | + Allows namespace administrators to update a pending membership invitation. + Currently supports updating the role assigned to the invitation. + The active user must have authority over the role being assigned. + tags: + - cloud + - members + - namespaces + security: + - jwt: [] + parameters: + - $ref: ../components/parameters/path/namespaceTenantIDPath.yaml + - name: user-id + description: The ID of the invited user + schema: + type: string + required: true + in: path + requestBody: + content: + application/json: + schema: + type: object + properties: + role: + $ref: ../components/schemas/namespaceMemberRole.yaml + responses: + '200': + description: Invitation successfully updated + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml + +delete: + operationId: cancelMembershipInvitation + summary: Cancel a pending membership invitation + description: | + Allows namespace administrators to cancel a pending membership invitation. 
+ The invitation status will be updated to "cancelled". + The active user must have authority over the role of the invitation being cancelled. + tags: + - cloud + - members + - namespaces + security: + - jwt: [] + parameters: + - $ref: ../components/parameters/path/namespaceTenantIDPath.yaml + - name: user-id + description: The ID of the invited user + schema: + type: string + required: true + in: path + responses: + '200': + description: Invitation successfully cancelled + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@namespaces@{tenant}@members.yaml b/openapi/spec/paths/api@namespaces@{tenant}@members.yaml new file mode 100644 index 00000000000..feb9127a4ab --- /dev/null +++ b/openapi/spec/paths/api@namespaces@{tenant}@members.yaml @@ -0,0 +1,87 @@ +delete: + operationId: leaveNamespace + summary: Leave Namespace + description: | + Allows the authenticated user to leave the specified namespace. Owners + cannot leave a namespace; they must delete it instead. If the user attempts + to leave their current authenticated namespace, the response will provide a + new token that excludes this namespace. + tags: + - community + - members + - namespaces + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/path/namespaceTenantIDPath.yaml + responses: + '200': + description: Successfully left the namespace. + content: + application/json: + schema: + $ref: ../components/schemas/userAuth.yaml + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '500': + $ref: ../components/responses/500.yaml +post: + operationId: addNamespaceMember + summary: Invite member + description: | + Invites a member to a namespace. 
+ + In enterprise and community instances, the member will automatically accept + the invite and will have an `accepted` status. + + In cloud instances, the member will have a `pending` status until they + accept the invite via an email sent to them. The invite is valid for **7 days**. + If the member was previously invited and the invite is no longer valid, the same route + will resend the invite. + tags: + - community + - members + - namespaces + security: + - jwt: [] + parameters: + - $ref: ../components/parameters/path/namespaceTenantIDPath.yaml + requestBody: + content: + application/json: + schema: + type: object + properties: + email: + description: The email of the member. + type: string + example: john.doe@test.com + role: + $ref: ../components/schemas/namespaceMemberRole.yaml + required: + - email + - role + responses: + '200': + description: Success to add a member to a namespace. + content: + application/json: + schema: + $ref: ../components/schemas/namespace.yaml + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '404': + $ref: ../components/responses/404.yaml + '403': + $ref: ../components/responses/403.yaml + '409': + $ref: ../components/responses/409.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@namespaces@{tenant}@members@{id}@accept-invite.yaml b/openapi/spec/paths/api@namespaces@{tenant}@members@{id}@accept-invite.yaml new file mode 100644 index 00000000000..15be00392dd --- /dev/null +++ b/openapi/spec/paths/api@namespaces@{tenant}@members@{id}@accept-invite.yaml @@ -0,0 +1,47 @@ +get: + operationId: lookupUserStatus + summary: Lookup User's Status + description: | + Clients may need to check a user's status before deciding whether to redirect + to the accept-invite workflow or to the signup process. It is intended for use + exclusively by clients in the `invite-member` pipeline. 
+ tags: + - cloud + - members + - namespaces + parameters: + - name: tenant + in: path + required: true + description: The tenant ID of the namespace. + schema: + type: string + - name: id + in: path + required: true + description: The user's ID. + schema: + type: string + - name: sig + in: query + required: true + description: The signature included in the email. This is used instead of the user's token to authenticate the request. + schema: + type: string + responses: + '200': + description: Success + content: + application/json: + schema: + type: object + properties: + status: + type: string + example: invited + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@namespaces@{tenant}@members@{uid}.yaml b/openapi/spec/paths/api@namespaces@{tenant}@members@{uid}.yaml new file mode 100644 index 00000000000..f47f16959b7 --- /dev/null +++ b/openapi/spec/paths/api@namespaces@{tenant}@members@{uid}.yaml @@ -0,0 +1,58 @@ +delete: + operationId: removeNamespaceMember + summary: Remove a member from a namespace + description: Remove a member from a namespace. + tags: + - community + - namespaces + security: + - jwt: [] + parameters: + - $ref: ../components/parameters/path/namespaceTenantIDPath.yaml + - $ref: ../components/parameters/path/namespaceMemberIDPath.yaml + responses: + '200': + description: Success to delete a member from namespace. + content: + application/json: + schema: + $ref: ../components/schemas/namespace.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml +patch: + operationId: updateNamespaceMember + summary: Update a member from a namespace + description: Update a member role from a namespace.
+ tags: + - community + - namespaces + security: + - jwt: [] + parameters: + - $ref: ../components/parameters/path/namespaceTenantIDPath.yaml + - $ref: ../components/parameters/path/namespaceMemberIDPath.yaml + requestBody: + content: + application/json: + schema: + type: object + properties: + role: + $ref: ../components/schemas/namespaceMemberRole.yaml + responses: + '200': + description: Success to update member role from a namespace. + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@namespaces@{tenant}@support.yaml b/openapi/spec/paths/api@namespaces@{tenant}@support.yaml new file mode 100644 index 00000000000..4b4bad2c734 --- /dev/null +++ b/openapi/spec/paths/api@namespaces@{tenant}@support.yaml @@ -0,0 +1,29 @@ +parameters: + - $ref: ../components/parameters/path/namespaceTenantIDPath.yaml +get: + operationId: getNamespaceSupport + summary: Get a namespace support identifier. + description: Get a namespace support identifier. + tags: + - cloud + - namespaces + security: + - jwt: [] + - api-key: [] + responses: + '200': + description: Success to get a namespace support identifier. 
+ content: + application/json: + schema: + $ref: ../components/schemas/support.yaml + '401': + $ref: ../components/responses/401.yaml + '402': + $ref: ../components/responses/402.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@namespaces@{tenant}@tags.yaml b/openapi/spec/paths/api@namespaces@{tenant}@tags.yaml new file mode 100644 index 00000000000..438e9d0c0f5 --- /dev/null +++ b/openapi/spec/paths/api@namespaces@{tenant}@tags.yaml @@ -0,0 +1,81 @@ +get: + operationId: getTagsDeprecated + deprecated: true + summary: Retrieve all tags associated with a namespace + description: '**Deprecated**: Use `GET /api/tags` instead.' + tags: + - community + - tags + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/path/namespaceTenantIDPath.yaml + - $ref: ../components/parameters/query/filterQuery.yaml + - $ref: ../components/parameters/query/pageQuery.yaml + - $ref: ../components/parameters/query/perPageQuery.yaml + responses: + '200': + description: Success to get tag list. + headers: + X-Total-Count: + description: Tags' total number. + schema: + type: string + minimum: 0 + content: + application/json: + schema: + type: array + items: + $ref: ../components/schemas/tag.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml +post: + operationId: createTagDeprecated + deprecated: true + summary: Create a new tag in the namespace + description: '**Deprecated**: Use `POST /api/tags` instead. Creates a tag that can be later associated with content. Tag names must be unique within the namespace.' 
+ tags: + - community + - tags + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/path/namespaceTenantIDPath.yaml + requestBody: + required: true + content: + application/json: + schema: + type: object + required: + - name + properties: + name: + type: string + minLength: 3 + maxLength: 255 + example: dev + responses: + '201': + description: Tag successfully created + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '409': + $ref: ../components/responses/409.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@namespaces@{tenant}@tags@{name}.yaml b/openapi/spec/paths/api@namespaces@{tenant}@tags@{name}.yaml new file mode 100644 index 00000000000..497a981e76c --- /dev/null +++ b/openapi/spec/paths/api@namespaces@{tenant}@tags@{name}.yaml @@ -0,0 +1,90 @@ +patch: + operationId: updateTagDeprecated + deprecated: true + summary: Update a tag + description: '**Deprecated**: Use `PATCH /api/tags/{name}` instead.' 
+ tags: + - community + - tags + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/path/namespaceTenantIDPath.yaml + - name: name + description: Current tag name + schema: + type: string + pattern: ^[a-zA-Z0-9-_]+$ + example: prod + required: true + in: path + requestBody: + required: true + content: + application/json: + schema: + type: object + required: + - name + properties: + name: + type: string + minLength: 3 + maxLength: 255 + pattern: ^[a-zA-Z0-9-_]+$ + example: dev + description: New tag name + responses: + '200': + description: Tag successfully updated + content: + application/json: + schema: + $ref: ../components/schemas/tag.yaml + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '409': + $ref: ../components/responses/409.yaml + '500': + $ref: ../components/responses/500.yaml +delete: + operationId: deleteTagDeprecated + deprecated: true + summary: Delete a tag + description: '**Deprecated**: Use `DELETE /api/tags/{name}` instead. Removes a tag and all its associations.' 
+ tags: + - community + - tags + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/path/namespaceTenantIDPath.yaml + - name: name + description: Tag name to delete + schema: + type: string + pattern: ^[a-zA-Z0-9-_]+$ + example: prod + required: true + in: path + responses: + '204': + description: Tag successfully deleted + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@register.yaml b/openapi/spec/paths/api@register.yaml new file mode 100644 index 00000000000..9463873ba0a --- /dev/null +++ b/openapi/spec/paths/api@register.yaml @@ -0,0 +1,70 @@ +post: + operationId: registerUser + summary: Register a new user + tags: + - cloud + - users + security: [] + requestBody: + content: + application/json: + schema: + type: object + properties: + name: + type: string + example: John Doe + description: The full name of the user. + email: + type: string + example: john.doe@test.com + description: | + The user's email address, which must be unique. This email will + be used for login and for receiving important notifications, + such as password reset emails. If `email_marketing` is set to + `true`, promotional emails will also be sent to this address. + username: + type: string + example: john_doe + description: | + The username, which must be unique across the system. Users can + log in using either their username or email. + password: + type: string + pattern: ^([a-zA-Z0-9_-]){1,64}$ + example: mS@aZ%n267M@3&k!H46^#78s!@$F4^@7 + description: | + The password for the user account. Must follow the regex. + email_marketing: + type: boolean + example: true + description: | + Indicates whether the user opts to receive marketing and + promotional emails.
+ sig: + type: string + example: '' + description: | + **For standard registration processes, this field should be + ignored.** + + A unique signature included in an invitation email. This is + used to automatically confirm the user's registration without + requiring an additional confirmation email. + required: + - name + - email + - username + - password + - email_marketing + responses: + '200': + description: User registered successfully + '400': + $ref: ../components/responses/invalidFields.yaml + '401': + $ref: ../components/responses/401.yaml + '409': + $ref: ../components/responses/conflictFields.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@sessions.yaml b/openapi/spec/paths/api@sessions.yaml new file mode 100644 index 00000000000..c79a1313a14 --- /dev/null +++ b/openapi/spec/paths/api@sessions.yaml @@ -0,0 +1,33 @@ +get: + operationId: getSessions + summary: Get sessions + description: Get a list sessions. + tags: + - community + - sessions + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/query/pageQuery.yaml + - $ref: ../components/parameters/query/perPageQuery.yaml + responses: + '200': + description: Success to get list of sessions. + headers: + X-Total-Count: + description: Sessions' total number. + schema: + type: string + minimum: 0 + readOnly: true + content: + application/json: + schema: + type: array + items: + $ref: ../components/schemas/session.yaml + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@sessions@{uid}.yaml b/openapi/spec/paths/api@sessions@{uid}.yaml new file mode 100644 index 00000000000..bce7f0e6f5f --- /dev/null +++ b/openapi/spec/paths/api@sessions@{uid}.yaml @@ -0,0 +1,49 @@ +parameters: + - $ref: ../components/parameters/path/sessionUIDPath.yaml +get: + operationId: getSession + summary: Get session + description: Get a session. 
+ tags: + - community + - sessions + security: + - jwt: [] + - api-key: [] + responses: + '200': + description: Success to get a session. + content: + application/json: + schema: + $ref: ../components/schemas/session.yaml + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml +post: + operationId: setSessionAuthenticationStatus + summary: Set session authentication status + description: Set session authentication status. + tags: + - community + - sessions + security: + - jwt: [] + - api-key: [] + requestBody: + content: + application/json: + schema: + type: object + properties: + authenticated: + description: Session's authentication status. + type: boolean + responses: + '200': + $ref: ../components/responses/200.yaml + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@sessions@{uid}@close.yaml b/openapi/spec/paths/api@sessions@{uid}@close.yaml new file mode 100644 index 00000000000..00ac9e5782f --- /dev/null +++ b/openapi/spec/paths/api@sessions@{uid}@close.yaml @@ -0,0 +1,30 @@ +post: + operationId: closeSession + summary: Close session + description: Close a session. + security: + - jwt: [] + - api-key: [] + tags: + - cloud + + - sessions + parameters: + - $ref: ../components/parameters/path/sessionUIDPath.yaml + requestBody: + content: + application/json: + schema: + type: object + properties: + device: + $ref: ../components/schemas/deviceUID.yaml + required: + - device + responses: + '200': + description: Success to close session.
+ '400': + $ref: ../components/responses/400.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@sessions@{uid}@records.yaml b/openapi/spec/paths/api@sessions@{uid}@records.yaml new file mode 100644 index 00000000000..7a410abd5a7 --- /dev/null +++ b/openapi/spec/paths/api@sessions@{uid}@records.yaml @@ -0,0 +1,31 @@ +parameters: + - $ref: ../components/parameters/path/sessionUIDPath.yaml +get: + operationId: listSessionRecords + summary: List session records + description: List session records + security: + - jwt: [] + - api-key: [] + tags: + - cloud + - sessions + parameters: + - $ref: ../components/parameters/query/pageQuery.yaml + - $ref: ../components/parameters/query/perPageQuery.yaml + responses: + '200': + description: Success to list the session records. + headers: + X-Total-Count: + description: Session records' total number. + schema: + type: integer + minimum: 0 + readOnly: true + '400': + $ref: ../components/responses/400.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@sessions@{uid}@records@{seat}.yaml b/openapi/spec/paths/api@sessions@{uid}@records@{seat}.yaml new file mode 100644 index 00000000000..e8852efe9b0 --- /dev/null +++ b/openapi/spec/paths/api@sessions@{uid}@records@{seat}.yaml @@ -0,0 +1,104 @@ +parameters: + - $ref: ../components/parameters/path/sessionUIDPath.yaml + - name: seat + schema: + description: Record's seat + type: integer + in: path + required: true +post: + operationId: recordSession + summary: Record session + description: Record data about a session. + security: + - jwt: [] + - api-key: [] + tags: + - cloud + - sessions + requestBody: + content: + application/json: + schema: + type: object + properties: + uid: + description: Session's UID. + type: string + seat: + description: Session's Seat + type: integer + message: + description: Session's data.
+ type: string + width: + description: Session's pty width. + type: integer + height: + description: Session's pty height. + type: integer + required: + - uid + - seat + - message + - width + - height + responses: + '200': + description: Success to record session. + '400': + $ref: ../components/responses/400.yaml + '422': + description: Unprocessable Entity + content: + application/json: + schema: + type: object + properties: + message: + description: Error's message. + type: string + '500': + $ref: ../components/responses/500.yaml +get: + operationId: getSessionRecord + summary: Get session record + description: Get a session record based on its seat. + security: + - jwt: [] + - api-key: [] + tags: + - cloud + - sessions + responses: + '200': + description: Success to play a session's record. + content: + application/json: + schema: + $ref: ../components/schemas/recordedSessionResponse.yaml + '400': + $ref: ../components/responses/400.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml +delete: + operationId: deleteSessionRecord + summary: Delete session record + description: Deletes a session record based on its seat. + security: + - jwt: [] + - api-key: [] + tags: + - cloud + - sessions + responses: + '200': + description: Success to delete a session's record. 
+ '400': + $ref: ../components/responses/400.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@setup.yaml b/openapi/spec/paths/api@setup.yaml new file mode 100644 index 00000000000..0a2b1721af1 --- /dev/null +++ b/openapi/spec/paths/api@setup.yaml @@ -0,0 +1,43 @@ +post: + operationId: setup + summary: User setup + description: Register an user and create namespace with the same name as username + security: [] + tags: + - community + - system + parameters: + - name: sign + description: 'Signature used to validate request origin generated by running `./bin/setup` script' + schema: + type: string + required: true + in: query + requestBody: + content: + application/json: + schema: + type: object + properties: + name: + $ref: ../components/schemas/userName.yaml + email: + $ref: ../components/schemas/userEmail.yaml + username: + $ref: ../components/schemas/userUsername.yaml + password: + $ref: ../components/schemas/userPassword.yaml + required: + - name + - email + - username + - password + responses: + '200': + description: Success to register user on setup. + '409': + $ref: ../components/responses/409.yaml + '400': + $ref: ../components/responses/400.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@sshkeys@public-keys.yaml b/openapi/spec/paths/api@sshkeys@public-keys.yaml new file mode 100644 index 00000000000..07460991bc8 --- /dev/null +++ b/openapi/spec/paths/api@sshkeys@public-keys.yaml @@ -0,0 +1,84 @@ +get: + operationId: getPublicKeys + summary: Get public keys + description: Get a list from all public keys. 
+ tags: + - community + - ssh + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/query/filterQuery.yaml + - $ref: ../components/parameters/query/pageQuery.yaml + - $ref: ../components/parameters/query/perPageQuery.yaml + responses: + '200': + description: Success to get a list of public keys. + headers: + X-Total-Count: + description: Public keys' total number. + schema: + type: string + minimum: 0 + readOnly: true + content: + application/json: + schema: + type: array + items: + $ref: ../components/schemas/publicKeyResponse.yaml + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml +post: + operationId: createPublicKey + summary: Create public key + description: Create a new public key. + tags: + - community + - ssh + security: + - jwt: [] + requestBody: + content: + application/json: + schema: + $ref: ../components/schemas/publicKeyRequest.yaml + responses: + '200': + description: Success to create a public key. + content: + application/json: + schema: + type: object + properties: + data: + $ref: ../components/schemas/publicKeyData.yaml + fingerprint: + $ref: ../components/schemas/publickKeyFingerprint.yaml + tenant_id: + $ref: ../components/schemas/namespaceTenantID.yaml + name: + description: Public key's name. 
+ type: string + example: example + filter: + $ref: ../components/schemas/publicKeyFilter.yaml + username: + $ref: ../components/schemas/publicKeyUsername.yaml + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '409': + $ref: ../components/responses/409.yaml + '422': + description: UnprocessableEntity + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@sshkeys@public-keys@{fingerprint}.yaml b/openapi/spec/paths/api@sshkeys@public-keys@{fingerprint}.yaml new file mode 100644 index 00000000000..5685348e57d --- /dev/null +++ b/openapi/spec/paths/api@sshkeys@public-keys@{fingerprint}.yaml @@ -0,0 +1,68 @@ +parameters: + - $ref: ../components/parameters/path/publicKeyFingerprintPath.yaml +put: + operationId: updatePublicKey + summary: Update public key + description: Update a public key. + tags: + - community + - ssh + security: + - jwt: [] + requestBody: + content: + application/json: + schema: + type: object + properties: + name: + description: Public key's name. + type: string + example: example + username: + description: Public key's username. + type: string + example: example + filter: + $ref: ../components/schemas/publicKeyFilter.yaml + required: + - name + - username + - filter + responses: + '200': + description: Success to update a public key. + content: + application/json: + schema: + $ref: ../components/schemas/publicKeyResponse.yaml + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml +delete: + operationId: deletePublicKey + summary: Delete public key + description: Delete a public key. 
+ tags: + - community + - ssh + security: + - jwt: [] + responses: + '200': + description: Success to delete a public key. + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@stats.yaml b/openapi/spec/paths/api@stats.yaml new file mode 100644 index 00000000000..ac54cc7a3ac --- /dev/null +++ b/openapi/spec/paths/api@stats.yaml @@ -0,0 +1,42 @@ +get: + operationId: getStatusDevices + summary: Get stats for the ShellHub instance + description: Get stats for the ShellHub instance. + tags: + - community + - devices + security: + - jwt: [] + - api-key: [] + responses: + '200': + description: Success to get stats from the ShellHub instance. + content: + application/json: + schema: + type: object + properties: + registered_devices: + description: Number of registered devices + type: integer + minimum: 0 + online_devices: + description: Number of online devices + type: integer + minimum: 0 + pending_devices: + description: Number of pending devices + type: integer + minimum: 0 + rejected_devices: + description: Number of rejected devices + type: integer + minimum: 0 + active_sessions: + description: Active sessions + type: integer + minimum: 0 + '401': + description: Unauthorized + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@tags.yaml b/openapi/spec/paths/api@tags.yaml new file mode 100644 index 00000000000..7bbfa8f5ca2 --- /dev/null +++ b/openapi/spec/paths/api@tags.yaml @@ -0,0 +1,76 @@ +get: + operationId: getTags + summary: Retrieve all tags associated with the namespace + description: Retrieves all tags for the authenticated namespace. 
+ tags: + - community + - tags + security: + - jwt: [] + - api-key: [] + parameters: + - $ref: ../components/parameters/query/filterQuery.yaml + - $ref: ../components/parameters/query/pageQuery.yaml + - $ref: ../components/parameters/query/perPageQuery.yaml + responses: + '200': + description: Success to get tag list. + headers: + X-Total-Count: + description: Tags' total number. + schema: + type: string + minimum: 0 + content: + application/json: + schema: + type: array + items: + $ref: ../components/schemas/tag.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml +post: + operationId: createTag + summary: Create a new tag in the namespace + description: Creates a tag in the authenticated namespace that can be later associated with content. Tag names must be unique within the namespace. + tags: + - community + - tags + security: + - jwt: [] + - api-key: [] + requestBody: + required: true + content: + application/json: + schema: + type: object + required: + - name + properties: + name: + type: string + minLength: 3 + maxLength: 255 + example: dev + responses: + '201': + description: Tag successfully created + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '409': + $ref: ../components/responses/409.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@tags@{name}.yaml b/openapi/spec/paths/api@tags@{name}.yaml new file mode 100644 index 00000000000..02624dd90fe --- /dev/null +++ b/openapi/spec/paths/api@tags@{name}.yaml @@ -0,0 +1,86 @@ +patch: + operationId: updateTag + summary: Update a tag + description: Updates a tag in the authenticated namespace. 
+ tags: + - community + - tags + security: + - jwt: [] + - api-key: [] + parameters: + - name: name + description: Current tag name + schema: + type: string + pattern: ^[a-zA-Z0-9-_]+$ + example: prod + required: true + in: path + requestBody: + required: true + content: + application/json: + schema: + type: object + required: + - name + properties: + name: + type: string + minLength: 3 + maxLength: 255 + pattern: ^[a-zA-Z0-9-_]+$ + example: dev + description: New tag name + responses: + '200': + description: Tag successfully updated + content: + application/json: + schema: + $ref: ../components/schemas/tag.yaml + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '409': + $ref: ../components/responses/409.yaml + '500': + $ref: ../components/responses/500.yaml +delete: + operationId: deleteTag + summary: Delete a tag + description: Removes a tag and all its associations from the authenticated namespace. + tags: + - community + - tags + security: + - jwt: [] + - api-key: [] + parameters: + - name: name + description: Tag name to delete + schema: + type: string + pattern: ^[a-zA-Z0-9-_]+$ + example: prod + required: true + in: path + responses: + '204': + description: Tag successfully deleted + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@token@{tenant}.yaml b/openapi/spec/paths/api@token@{tenant}.yaml new file mode 100644 index 00000000000..136ef831d76 --- /dev/null +++ b/openapi/spec/paths/api@token@{tenant}.yaml @@ -0,0 +1,27 @@ +get: + operationId: getToken + summary: Get token + description: Get a token from its tenant. 
+ tags: + - community + - users + security: + - jwt: [] + parameters: + - name: tenant + description: Tenant + schema: + type: string + required: true + in: path + responses: + '200': + description: Success get token + content: + application/json: + schema: + $ref: ../components/schemas/userAuth.yaml + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@user.yaml b/openapi/spec/paths/api@user.yaml new file mode 100644 index 00000000000..5966d1ba86a --- /dev/null +++ b/openapi/spec/paths/api@user.yaml @@ -0,0 +1,24 @@ +delete: + operationId: deleteUser + summary: Delete user + description: | + Deletes the authenticated user. The user will be removed from any + namespaces they are a member of. Users who are owners of namespaces cannot + be deleted. In such cases, the user must delete the namespace(s) first. + + > NOTE: This route is available only for **cloud** instances. Enterprise + users must use the admin console, and community users must use the CLI. + tags: + - users + - cloud + security: + - jwt: [] + responses: + '204': + description: Success to delete the user + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@user@mfa@auth.yaml b/openapi/spec/paths/api@user@mfa@auth.yaml new file mode 100644 index 00000000000..88bb546f23b --- /dev/null +++ b/openapi/spec/paths/api@user@mfa@auth.yaml @@ -0,0 +1,32 @@ +post: + operationId: authMFA + summary: Auth MFA + description: | + Authenticate a user who has MFA enabled. This endpoint should be called after the default authUser endpoint, + which generates an `X-MFA-Token` indicating that the user has already authenticated with a password. 
+ tags: + - cloud + - mfa + - users + requestBody: + content: + application/json: + schema: + $ref: ../components/schemas/mfaAuth.yaml + responses: + '200': + description: Success to authenticate. + content: + application/json: + schema: + $ref: ../components/schemas/userAuth.yaml + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@user@mfa@disable.yaml b/openapi/spec/paths/api@user@mfa@disable.yaml new file mode 100644 index 00000000000..6a7eb9fbf9e --- /dev/null +++ b/openapi/spec/paths/api@user@mfa@disable.yaml @@ -0,0 +1,33 @@ +put: + operationId: disableMFA + summary: Disable MFA + description: | + Disable MFA for a user. To disable MFA, the user must provide either + a recovery code or the current MFA code. If a recovery code is used, it will be invalidated for future use. + + The recovery code used to regain access to the account can be used within a 10-minute window on this + endpoint. + tags: + - cloud + - mfa + - users + security: + - jwt: [] + requestBody: + content: + application/json: + schema: + $ref: ../components/schemas/mfaDisable.yaml + responses: + '200': + description: Success to disable a MFA status. + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@user@mfa@enable.yaml b/openapi/spec/paths/api@user@mfa@enable.yaml new file mode 100644 index 00000000000..fad3e986f87 --- /dev/null +++ b/openapi/spec/paths/api@user@mfa@enable.yaml @@ -0,0 +1,32 @@ +put: + operationId: enableMFA + summary: Enable MFA + description: | + Enable MFA for a user. 
The secret and recovery codes must be created by the + generateMFA endpoint. Users with MFA already enabled cannot override their + MFA credentials; in these cases, a user must disable MFA before proceeding. + The recovery e-mail must be a valid value in order to enable the MFA. + tags: + - cloud + - mfa + - users + security: + - jwt: [] + requestBody: + content: + application/json: + schema: + $ref: ../components/schemas/mfaEnable.yaml + responses: + '200': + description: Success to enable MFA. + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@user@mfa@generate.yaml b/openapi/spec/paths/api@user@mfa@generate.yaml new file mode 100644 index 00000000000..7e1d1124b81 --- /dev/null +++ b/openapi/spec/paths/api@user@mfa@generate.yaml @@ -0,0 +1,27 @@ +get: + operationId: generateMFA + summary: Generate MFA Credentials + description: | + Generate the credentials to enable a user's MFA. The user must save the recovery codes in a secure manner. + tags: + - cloud + - mfa + security: + - jwt: [] + responses: + '200': + description: Successfully generated MFA credentials. 
+ content: + application/json: + schema: + $ref: '../components/schemas/mfaGenerate.yaml' + '400': + $ref: '../components/responses/400.yaml' + '401': + $ref: '../components/responses/401.yaml' + '403': + $ref: '../components/responses/403.yaml' + '404': + $ref: '../components/responses/404.yaml' + '500': + $ref: '../components/responses/500.yaml' diff --git a/openapi/spec/paths/api@user@mfa@recovery.yaml b/openapi/spec/paths/api@user@mfa@recovery.yaml new file mode 100644 index 00000000000..855fbf14243 --- /dev/null +++ b/openapi/spec/paths/api@user@mfa@recovery.yaml @@ -0,0 +1,43 @@ +post: + operationId: mfaRecover + summary: Recover MFA + description: | + Recover account access by providing one of the user's recovery codes. It + will be invalidated for future uses. + + The recovery code will be cached for 10 minutes. During this period, the + user can use the same recovery code to disable their MFA without needing to + provide two separate codes. The `X-Expires-At` header specifies the epoch + value marking the end of the cache period. + tags: + - cloud + - mfa + - users + requestBody: + content: + application/json: + schema: + $ref: ../components/schemas/mfaRecover.yaml + responses: + '200': + description: Success recover account's access. + headers: + X-Expires-At: + description: The epoch time at which the recovery code will become invalid. 
+ schema: + type: string + minimum: 0 + content: + application/json: + schema: + $ref: ../components/schemas/userAuth.yaml + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@user@mfa@reset.yaml b/openapi/spec/paths/api@user@mfa@reset.yaml new file mode 100644 index 00000000000..03a5c2ee51e --- /dev/null +++ b/openapi/spec/paths/api@user@mfa@reset.yaml @@ -0,0 +1,36 @@ +post: + operationId: requestResetMFA + summary: Request Reset MFA + description: | + Sends an email to both the user's main and recovery addresses. Each email + contains a unique code, which remains valid for at most 1 day. The user + must provide both codes to reset their MFA. + tags: + - cloud + - mfa + - users + requestBody: + content: + application/json: + schema: + type: object + properties: + identifier: + description: The same as the login identifier; can be either the user's email or username. + type: string + example: john_doe + required: + - identifier + responses: + '200': + description: Success to send the email. + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@user@mfa@reset@{user-id}.yaml b/openapi/spec/paths/api@user@mfa@reset@{user-id}.yaml new file mode 100644 index 00000000000..40f1deb50e1 --- /dev/null +++ b/openapi/spec/paths/api@user@mfa@reset@{user-id}.yaml @@ -0,0 +1,41 @@ +put: + operationId: resetMFA + summary: Reset MFA + description: | + Similar to the `disableMFA` operation, this endpoint uses the two codes sent + by `requestResetMFA` instead of a TOTP or recovery code. 
The user ID must + be the same as the one used for `requestResetMFA`. + tags: + - cloud + - mfa + - users + parameters: + - in: path + name: user-id + required: true + schema: + type: string + description: ID of the user trying to reset MFA. + example: 664e02087116dc765ef876a0 + requestBody: + content: + application/json: + schema: + $ref: ../components/schemas/mfaReset.yaml + responses: + '200': + description: Success to reset and authenticate. + content: + application/json: + schema: + $ref: ../components/schemas/userAuth.yaml + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@user@recover_password.yaml b/openapi/spec/paths/api@user@recover_password.yaml new file mode 100644 index 00000000000..9c6761e58fd --- /dev/null +++ b/openapi/spec/paths/api@user@recover_password.yaml @@ -0,0 +1,30 @@ +post: + operationId: recoverPassword + summary: Recover password + description: Send a recovery email to the user. + tags: + - cloud + + - users + security: [] + requestBody: + content: + application/json: + schema: + type: object + properties: + username: + oneOf: + - $ref: ../components/schemas/userUsername.yaml + - $ref: ../components/schemas/userEmail.yaml + required: + - username + responses: + '200': + description: Success to send email to recover user password. 
+ '401': + $ref: ../components/responses/401.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@user@resend_email.yaml b/openapi/spec/paths/api@user@resend_email.yaml new file mode 100644 index 00000000000..608c1006520 --- /dev/null +++ b/openapi/spec/paths/api@user@resend_email.yaml @@ -0,0 +1,30 @@ +post: + operationId: resendEmail + summary: Resend confirmation + description: Resend confirmation to user. + tags: + - cloud + - users + security: [] + requestBody: + content: + application/json: + schema: + type: object + properties: + username: + description: User's username. + type: string + minLength: 3 + maxLength: 20 + required: + - username + example: + username: example + responses: + '200': + description: Success to resend confirmation to user. + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@user@saml@auth.yaml b/openapi/spec/paths/api@user@saml@auth.yaml new file mode 100644 index 00000000000..0b3666636de --- /dev/null +++ b/openapi/spec/paths/api@user@saml@auth.yaml @@ -0,0 +1,34 @@ +get: + operationId: getSamlAuthUrl + summary: Get SAML authentication URL + description: | + Retrieves the Identity Provider (IdP) URL for authentication within ShellHub. + After successful authentication, users are automatically redirected to the ShellHub dashboard. + + To access this endpoint, SAML authentication must be enabled and the + instance must be running the Enterprise edition. If not, the endpoint + returns a `501 Not Implemented` status code. 
+ tags: + - cloud + - users + security: [] + responses: + '200': + description: Successfully retrieved the SAML authentication URL + content: + application/json: + schema: + type: object + required: + - url + properties: + url: + type: string + description: The complete URL to the Identity Provider's login page + example: 'https://login.microsoftonline.com/tenant-id/saml2?SAMLRequest=encoded-request' + '400': + $ref: '../components/responses/400.yaml' + '500': + $ref: '../components/responses/500.yaml' + '501': + description: SAML authentication is not enabled or instance is not running Enterprise edition diff --git a/openapi/spec/paths/api@user@validation_account.yaml b/openapi/spec/paths/api@user@validation_account.yaml new file mode 100644 index 00000000000..06eb6ae6306 --- /dev/null +++ b/openapi/spec/paths/api@user@validation_account.yaml @@ -0,0 +1,40 @@ +get: + operationId: getValidateAccount + summary: Validate activation link + description: Validate the activation link for user. + tags: + - cloud + + - users + security: [] + parameters: + - name: email + description: User's email. + schema: + type: string + format: email + example: example@example.com + required: true + in: query + - name: token + description: |- + User's validation token. + + + It is a token received from the email used to validate the user. + schema: + type: string + example: 3dd0d1f8-8246-4519-b11a-a3dd33717f65 + required: true + in: query + responses: + '200': + description: Success to validate user. 
+ '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@user@{uid}@update_password.yaml b/openapi/spec/paths/api@user@{uid}@update_password.yaml new file mode 100644 index 00000000000..d4ee9070f2a --- /dev/null +++ b/openapi/spec/paths/api@user@{uid}@update_password.yaml @@ -0,0 +1,44 @@ +post: + operationId: updateRecoverPassword + summary: Update user password + description: Update user password from a recovery token got from email. + tags: + - cloud + - users + security: [] + parameters: + - name: uid + description: User's UID. + schema: + type: string + example: 507f1f77bcf86cd799439011 + required: true + in: path + requestBody: + content: + application/json: + schema: + type: object + properties: + password: + $ref: ../components/schemas/userPassword.yaml + token: + description: | + User's recovery token. + + + It is the token from the email sent to user when the user request password reset. + type: string + example: + password: example + token: 3dd0d1f8-8246-4519-b11a-a3dd33717f65 + required: + - password + - token + responses: + '200': + description: Success to update user password. + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@users.yaml b/openapi/spec/paths/api@users.yaml new file mode 100644 index 00000000000..72da43c8ee0 --- /dev/null +++ b/openapi/spec/paths/api@users.yaml @@ -0,0 +1,46 @@ +patch: + operationId: updateUser + summary: Update user + tags: + - community + - users + security: + - jwt: [] + requestBody: + content: + application/json: + schema: + type: object + properties: + name: + type: string + username: + type: string + email: + type: string + recovery_email: + description: | + A recovery email serves as the user's final recourse to regain + access to their account. 
It cannot be the same as the user's + primary email. Once defined, it cannot be updated to an empty + value. + type: string + password: + type: string + current_password: + description: | + It's required when updating the user's password. + type: string + responses: + '200': + $ref: ../components/responses/200.yaml + '400': + $ref: ../components/responses/invalidFields.yaml + '401': + $ref: ../components/responses/401.yaml + '404': + $ref: ../components/responses/404.yaml + '409': + $ref: ../components/responses/conflictFields.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@users@invitations.yaml b/openapi/spec/paths/api@users@invitations.yaml new file mode 100644 index 00000000000..62cbe7e7de2 --- /dev/null +++ b/openapi/spec/paths/api@users@invitations.yaml @@ -0,0 +1,43 @@ +get: + operationId: getMembershipInvitationList + summary: Get membership invitations for the authenticated user + description: | + Returns a paginated list of membership invitations for the authenticated user. + This endpoint allows users to view all namespace invitations they have received. + tags: + - cloud + - members + - namespaces + security: + - jwt: [] + parameters: + - name: filter + description: | + Membership invitations filter. + + Filter field receives a base64 encoded JSON object to limit the search. + schema: + type: string + required: false + in: query + - $ref: ../components/parameters/query/pageQuery.yaml + - $ref: ../components/parameters/query/perPageQuery.yaml + responses: + '200': + description: Successfully retrieved membership invitations list. + headers: + X-Total-Count: + description: Total number of membership invitations. 
+ schema: + type: integer + minimum: 0 + content: + application/json: + schema: + type: array + items: + $ref: ../components/schemas/membershipInvitation.yaml + '401': + $ref: ../components/responses/401.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@users@security.yaml b/openapi/spec/paths/api@users@security.yaml new file mode 100644 index 00000000000..20ed24f56a4 --- /dev/null +++ b/openapi/spec/paths/api@users@security.yaml @@ -0,0 +1,25 @@ +get: + operationId: checkSessionRecord + summary: Check session record status + description: Check whether the `session record` feature is enabled. + tags: + - community + - sessions + - users + security: + - jwt: [] + - api-key: [] + responses: + '200': + description: Success to get session record status + content: + application/json: + schema: + description: Session record status + type: boolean + '401': + $ref: ../components/responses/401.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@users@security@{tenant}.yaml b/openapi/spec/paths/api@users@security@{tenant}.yaml new file mode 100644 index 00000000000..1394d801e57 --- /dev/null +++ b/openapi/spec/paths/api@users@security@{tenant}.yaml @@ -0,0 +1,33 @@ +put: + operationId: setSessionRecord + summary: Set session record + description: Define if sessions will be recorded. + tags: + - community + - sessions + - users + security: + - jwt: [] + parameters: + - $ref: ../components/parameters/path/namespaceTenantIDPath.yaml + requestBody: + content: + application/json: + schema: + type: object + properties: + session_record: + description: Session's record status. + type: boolean + default: false + responses: + '200': + description: Success to set session record status. 
+ '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@users@{id}@data.yaml b/openapi/spec/paths/api@users@{id}@data.yaml new file mode 100644 index 00000000000..2cc99be1a3e --- /dev/null +++ b/openapi/spec/paths/api@users@{id}@data.yaml @@ -0,0 +1,52 @@ +patch: + operationId: updateUserData + deprecated: true + summary: Update user data + description: Update user's data. + tags: + - community + - users + security: + - jwt: [] + parameters: + - name: id + description: User's ID. + schema: + type: string + required: true + in: path + requestBody: + content: + application/json: + schema: + type: object + properties: + name: + description: User's name. + type: string + username: + description: User's username. + type: string + email: + description: User's e-mail. + type: string + format: email + recovery_email: + description: | + User's recovery e-mail. A recovery email serves as the user's + final recourse to regain access to their account. + type: string + format: email + responses: + '200': + $ref: ../components/responses/200.yaml + '400': + $ref: ../components/responses/invalidFields.yaml + '401': + $ref: ../components/responses/401.yaml + '404': + $ref: ../components/responses/404.yaml + '409': + $ref: ../components/responses/conflictFields.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@users@{id}@password.yaml b/openapi/spec/paths/api@users@{id}@password.yaml new file mode 100644 index 00000000000..66559a4f11e --- /dev/null +++ b/openapi/spec/paths/api@users@{id}@password.yaml @@ -0,0 +1,42 @@ +patch: + operationId: updateUserPassword + deprecated: true + summary: Update user password + description: Update only the user password. 
+ tags: + - community + - users + security: + - jwt: [] + parameters: + - name: id + description: User ID + schema: + type: string + required: true + in: path + requestBody: + content: + application/json: + schema: + type: object + properties: + current_password: + description: User current password + type: string + new_password: + description: User new password + type: string + responses: + '200': + $ref: ../components/responses/200.yaml + '400': + $ref: ../components/responses/400.yaml + '401': + $ref: ../components/responses/401.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@web-endpoints.yaml b/openapi/spec/paths/api@web-endpoints.yaml new file mode 100644 index 00000000000..ed5f6df0d6f --- /dev/null +++ b/openapi/spec/paths/api@web-endpoints.yaml @@ -0,0 +1,96 @@ +get: + operationId: listWebEndpoints + summary: List web-endpoints + description: List all web-endpoints in the namespace. + tags: + - cloud + - web-endpoints + parameters: + - in: query + name: filter + description: | + Web endpoint's filter + + Filter field receives a base64 enconded JSON object for limit a search. + schema: + type: string + pattern: ^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$ + example: W3sidHlwZSI6InByb3BlcnR5IiwicGFyYW1zIjp7Im5hbWUiOiJuYW1lIiwib3BlcmF0b3IiOiJjb250YWlucyIsInZhbHVlIjoiZDAifX1d + - $ref: ../components/parameters/query/pageQuery.yaml + - $ref: ../components/parameters/query/perPageQuery.yaml + - $ref: ../components/parameters/query/sortByQuery.yaml + - $ref: ../components/parameters/query/orderByQuery.yaml + security: + - jwt: [] + - api-key: [] + responses: + '200': + description: Success to get the web-endpoints. + headers: + X-Total-Count: + description: Web endpoints' total number. 
+ schema: + type: string + minimum: 0 + readOnly: true + content: + application/json: + schema: + type: array + items: + $ref: ../components/schemas/webendpoint.yaml + '400': + $ref: ../components/responses/400.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml +post: + operationId: createWebEndpoint + summary: Create a web-endpoint + description: Creates a new web-endpoint for a device. + tags: + - cloud + - web-endpoints + security: + - jwt: [] + - api-key: [] + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + uid: + $ref: ../components/schemas/deviceUID.yaml + host: + $ref: ../components/schemas/webendpointHost.yaml + port: + $ref: ../components/schemas/webendpointPort.yaml + ttl: + $ref: ../components/schemas/webendpointTTL.yaml + tls: + $ref: ../components/schemas/webendpointTLS.yaml + required: + - uid + - host + - port + - ttl + responses: + '200': + description: Web-endpoint created successfully. + content: + application/json: + schema: + $ref: ../components/schemas/webendpoint.yaml + '400': + $ref: ../components/responses/400.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/spec/paths/api@web-endpoints@{address}.yaml b/openapi/spec/paths/api@web-endpoints@{address}.yaml new file mode 100644 index 00000000000..7117851ace5 --- /dev/null +++ b/openapi/spec/paths/api@web-endpoints@{address}.yaml @@ -0,0 +1,23 @@ +parameters: + - $ref: ../components/parameters/path/tunnelAddressPath.yaml +delete: + operationId: deleteWebEndpoint + summary: Delete a web-endpoint + description: Deletes a web-endpoint by address. + tags: + - cloud + - web-endpoints + security: + - jwt: [] + - api-key: [] + responses: + '200': + description: Web-endpoint deleted successfully. 
+ '400': + $ref: ../components/responses/400.yaml + '403': + $ref: ../components/responses/403.yaml + '404': + $ref: ../components/responses/404.yaml + '500': + $ref: ../components/responses/500.yaml diff --git a/openapi/static/index.html b/openapi/static/index.html new file mode 100644 index 00000000000..177a53d657b --- /dev/null +++ b/openapi/static/index.html @@ -0,0 +1,27 @@ + + + + ShellHub OpenAPI + + + + + + + + + + + + + diff --git a/pkg/agent/agent.go b/pkg/agent/agent.go deleted file mode 100644 index fced4d3ebf8..00000000000 --- a/pkg/agent/agent.go +++ /dev/null @@ -1,625 +0,0 @@ -// Package agent provides packages and functions to create a new ShellHub Agent instance. -// -// The ShellHub Agent is a lightweight software component that runs the device and provide communication between the -// device and ShellHub's server. Its main role is to provide a reserve SSH server always connected to the ShellHub -// server, allowing SSH connections to be established to the device even when it is behind a firewall or NAT. -// -// This package provides a simple API to create a new agent instance and start the communication with the server. The -// agent will automatically connect to the server and start listening for incoming connections. Once connected, the -// agent will also automatically reconnect to the server if the connection is lost. -// -// The update process isn't handled by this package. This feature is provided by its main implementation in -// [ShellHub Agent]. Check the [ShellHub Agent] documentation for more information. 
-// -// # Example: -// -// Creates the agent configuration with the minimum required fields: -// -// func main() { -// cfg := Config{ -// ServerAddress: "http://localhost:80", -// TenantID: "00000000-0000-4000-0000-000000000000", -// PrivateKey: "/tmp/shellhub.key", -// } -// -// ctx := context.Background() -// ag, err := NewAgentWithConfig(&cfg) -// if err != nil { -// panic(err) -// } -// -// if err := ag.Initialize(); err != nil { -// panic(err) -// } -// -// ag.Listen(ctx) -// } -// -// [ShellHub Agent]: https://github.com/shellhub-io/shellhub/tree/master/agent -package agent - -import ( - "context" - "crypto/rsa" - "io" - "net" - "net/http" - "net/url" - "os" - "runtime" - "strings" - "sync" - "time" - - "github.com/Masterminds/semver" - "github.com/labstack/echo/v4" - "github.com/pkg/errors" - "github.com/shellhub-io/shellhub/pkg/agent/pkg/keygen" - "github.com/shellhub-io/shellhub/pkg/agent/pkg/sysinfo" - "github.com/shellhub-io/shellhub/pkg/agent/pkg/tunnel" - "github.com/shellhub-io/shellhub/pkg/agent/server" - "github.com/shellhub-io/shellhub/pkg/api/client" - "github.com/shellhub-io/shellhub/pkg/models" - "github.com/shellhub-io/shellhub/pkg/revdial" - log "github.com/sirupsen/logrus" -) - -// AgentVersion store the version to be embed inside the binary. This is -// injected using `-ldflags` build option. -// -// go build -ldflags "-X main.AgentVersion=1.2.3" -// -// If set to `latest`, the auto-updating mechanism is disabled. This is intended -// to be used during development only. -var AgentVersion string - -// AgentPlatform stores what platform the agent is running on. This is injected in build time in the [ShellHub Agent] -// implementation. -// -// [ShellHub Agent]: https://github.com/shellhub-io/shellhub/tree/master/agent -var AgentPlatform string - -// Config provides the configuration for the agent service. -type Config struct { - // Set the ShellHub Cloud server address the agent will use to connect. - // This is required. 
- ServerAddress string `env:"SERVER_ADDRESS,required"` - - // Specify the path to the device private key. - // If not provided, the agent will generate a new one. - // This is required. - PrivateKey string `env:"PRIVATE_KEY,required"` - - // Sets the account tenant id used during communication to associate the - // device to a specific tenant. - // This is required. - TenantID string `env:"TENANT_ID,required"` - - // Determine the interval to send the keep alive message to the server. This - // has a direct impact of the bandwidth used by the device when in idle - // state. Default is 30 seconds. - KeepAliveInterval int `env:"KEEPALIVE_INTERVAL,default=30"` - - // Set the device preferred hostname. This provides a hint to the server to - // use this as hostname if it is available. - PreferredHostname string `env:"PREFERRED_HOSTNAME"` - - // Set the device preferred identity. This provides a hint to the server to - // use this identity if it is available. - PreferredIdentity string `env:"PREFERRED_IDENTITY,default="` - - // Set password for single-user mode (without root privileges). If not provided, - // multi-user mode (with root privileges) is enabled by default. - // NOTE: The password hash could be generated by ```openssl passwd```. - SingleUserPassword string `env:"SIMPLE_USER_PASSWORD"` -} - -type Agent struct { - config *Config - pubKey *rsa.PublicKey - Identity *models.DeviceIdentity - Info *models.DeviceInfo - authData *models.DeviceAuthResponse - cli client.Client - serverInfo *models.Info - serverAddress *url.URL - sessions []string - server *server.Server - tunnel *tunnel.Tunnel - mux sync.RWMutex - listening chan bool - closed bool - mode Mode -} - -// NewAgent creates a new agent instance. -// -// address is the ShellHub Server address the agent will use to connect, tenantID is the namespace where the device -// will be registered and privateKey is the path to the device private key. If privateKey is empty, a new key will be -// generated. 
-// -// To add a full customisation configuration, use [NewAgentWithConfig] instead. -func NewAgent(address string, tenantID string, privateKey string, mode Mode) (*Agent, error) { - return NewAgentWithConfig(&Config{ - ServerAddress: address, - TenantID: tenantID, - PrivateKey: privateKey, - }, mode) -} - -// NewAgentWithConfig creates a new agent instance with a custom configuration. -// -// Check [Config] for more information. -func NewAgentWithConfig(config *Config, mode Mode) (*Agent, error) { - if config.ServerAddress == "" { - return nil, errors.New("address is empty") - } - - serverAddress, err := url.Parse(config.ServerAddress) - if err != nil { - return nil, errors.Wrap(err, "failed to parse address") - } - - cli, err := client.NewClient(config.ServerAddress) - if err != nil { - return nil, err - } - - if config.TenantID == "" { - return nil, errors.New("tenantID is empty") - } - - if config.PrivateKey == "" { - return nil, errors.New("privateKey is empty") - } - - if mode == nil { - return nil, errors.New("mode cannot be nil") - } - - a := &Agent{ - config: config, - serverAddress: serverAddress, - cli: cli, - listening: make(chan bool), - mode: mode, - } - - return a, nil -} - -// Initialize initializes agent, generating device identity, loading device information, generating private key, -// reading public key, probing server information and authorizing device on ShellHub server. -// -// When any of the steps fails, the agent will return an error, and the agent will not be able to start. 
-func (a *Agent) Initialize() error { - if err := a.generateDeviceIdentity(); err != nil { - return errors.Wrap(err, "failed to generate device identity") - } - - if err := a.loadDeviceInfo(); err != nil { - return errors.Wrap(err, "failed to load device info") - } - - if err := a.generatePrivateKey(); err != nil { - return errors.Wrap(err, "failed to generate private key") - } - - if err := a.readPublicKey(); err != nil { - return errors.Wrap(err, "failed to read public key") - } - - if err := a.probeServerInfo(); err != nil { - return errors.Wrap(err, "failed to probe server info") - } - - if err := a.authorize(); err != nil { - return errors.Wrap(err, "failed to authorize device") - } - - a.mux.Lock() - a.closed = false - a.mux.Unlock() - - return nil -} - -// generatePrivateKey generates a new private key if it doesn't exist on the filesystem. -func (a *Agent) generatePrivateKey() error { - if _, err := os.Stat(a.config.PrivateKey); os.IsNotExist(err) { - if err := keygen.GeneratePrivateKey(a.config.PrivateKey); err != nil { - return err - } - } - - return nil -} - -func (a *Agent) readPublicKey() error { - key, err := keygen.ReadPublicKey(a.config.PrivateKey) - a.pubKey = key - - return err -} - -// generateDeviceIdentity generates device identity. -// -// When preferred identity on Agent is set, it will be used instead of the network interface MAC address, what is the -// default value for this property. -func (a *Agent) generateDeviceIdentity() error { - if id := a.config.PreferredIdentity; id != "" { - a.Identity = &models.DeviceIdentity{ - MAC: id, - } - - return nil - } - - // get identity from network interface. - iface, err := sysinfo.PrimaryInterface() - if err != nil { - return err - } - - a.Identity = &models.DeviceIdentity{ - MAC: iface.HardwareAddr.String(), - } - - return nil -} - -// loadDeviceInfo load some device informations like OS name, version, arch and platform. 
-func (a *Agent) loadDeviceInfo() error { - info, err := a.mode.GetInfo() - if err != nil { - return err - } - - a.Info = &models.DeviceInfo{ - ID: info.ID, - PrettyName: info.Name, - Version: AgentVersion, - Platform: AgentPlatform, - Arch: runtime.GOARCH, - } - - return nil -} - -// probeServerInfo probe server information. -func (a *Agent) probeServerInfo() error { - info, err := a.cli.GetInfo(AgentVersion) - a.serverInfo = info - - return err -} - -// authorize send auth request to the server. -func (a *Agent) authorize() error { - data, err := a.cli.AuthDevice(&models.DeviceAuthRequest{ - Info: a.Info, - DeviceAuth: &models.DeviceAuth{ - Hostname: a.config.PreferredHostname, - Identity: a.Identity, - TenantID: a.config.TenantID, - PublicKey: string(keygen.EncodePublicKeyToPem(a.pubKey)), - }, - }) - - a.authData = data - - return err -} - -func (a *Agent) NewReverseListener(ctx context.Context) (*revdial.Listener, error) { - return a.cli.NewReverseListener(ctx, a.authData.Token) -} - -func (a *Agent) Close() error { - a.mux.Lock() - a.closed = true - a.mux.Unlock() - - return a.tunnel.Close() -} - -func connHandler(serv *server.Server) func(c echo.Context) error { - return func(c echo.Context) error { - hj, ok := c.Response().Writer.(http.Hijacker) - if !ok { - return c.String(http.StatusInternalServerError, "webserver doesn't support hijacking") - } - - conn, _, err := hj.Hijack() - if err != nil { - return c.String(http.StatusInternalServerError, "failed to hijack connection") - } - - id := c.Param("id") - httpConn := c.Request().Context().Value("http-conn").(net.Conn) - serv.Sessions.Store(id, httpConn) - serv.HandleConn(httpConn) - - conn.Close() - - return nil - } -} - -func httpHandler() func(c echo.Context) error { - return func(c echo.Context) error { - replyError := func(err error, msg string, code int) error { - log.WithError(err).WithFields(log.Fields{ - "remote": c.Request().RemoteAddr, - "namespace": c.Request().Header.Get("X-Namespace"), - 
"path": c.Request().Header.Get("X-Path"), - "version": AgentVersion, - }).Error(msg) - - return c.String(code, msg) - } - - in, err := net.Dial("tcp", ":80") - if err != nil { - return replyError(err, "failed to connect to HTTP server on device", http.StatusInternalServerError) - } - - defer in.Close() - - url, err := url.Parse(c.Request().Header.Get("X-Path")) - if err != nil { - return replyError(err, "failed to parse URL", http.StatusInternalServerError) - } - - c.Request().URL.Scheme = "http" - c.Request().URL = url - - if err := c.Request().Write(in); err != nil { - return replyError(err, "failed to write request to the server on device", http.StatusInternalServerError) - } - - out, _, err := c.Response().Hijack() - if err != nil { - return replyError(err, "failed to hijack connection", http.StatusInternalServerError) - } - - defer out.Close() // nolint:errcheck - - if _, err := io.Copy(out, in); err != nil { - return replyError(err, "failed to copy response from device service to client", http.StatusInternalServerError) - } - - return nil - } -} - -func closeHandler(a *Agent, serv *server.Server) func(c echo.Context) error { - return func(c echo.Context) error { - id := c.Param("id") - serv.CloseSession(id) - - log.WithFields( - log.Fields{ - "id": id, - "version": AgentVersion, - "tenant_id": a.authData.Namespace, - "server_address": a.config.ServerAddress, - }, - ).Info("A tunnel connection was closed") - - return nil - } -} - -// Listen creates a new SSH server, through a reverse connection between the Agent and the ShellHub server. -func (a *Agent) Listen(ctx context.Context) error { - a.mode.Serve(a) - - a.tunnel = tunnel.NewBuilder(). - WithConnHandler(connHandler(a.server)). - WithCloseHandler(closeHandler(a, a.server)). - WithHTTPHandler(httpHandler()). 
- Build() - - done := make(chan bool) - go func() { - for { - a.mux.RLock() - if a.closed { - log.WithFields(log.Fields{ - "version": AgentVersion, - "tenant_id": a.authData.Namespace, - "server_address": a.config.ServerAddress, - }).Info("Stopped listening for connections") - - done <- true - - a.mux.RUnlock() - - return - } - a.mux.RUnlock() - - namespace := a.authData.Namespace - tenantName := a.authData.Name - sshEndpoint := a.serverInfo.Endpoints.SSH - - sshid := strings.NewReplacer( - "{namespace}", namespace, - "{tenantName}", tenantName, - "{sshEndpoint}", strings.Split(sshEndpoint, ":")[0], - ).Replace("{namespace}.{tenantName}@{sshEndpoint}") - - listener, err := a.NewReverseListener(ctx) - if err != nil { - log.WithError(err).WithFields(log.Fields{ - "version": AgentVersion, - "tenant_id": a.authData.Namespace, - "server_address": a.config.ServerAddress, - "ssh_server": sshEndpoint, - "sshid": sshid, - }).Error("Failed to connect to server through reverse tunnel. Retry in 10 seconds") - time.Sleep(time.Second * 10) - - continue - } - - log.WithFields(log.Fields{ - "namespace": namespace, - "hostname": tenantName, - "server_address": a.config.ServerAddress, - "ssh_server": sshEndpoint, - "sshid": sshid, - }).Info("Server connection established") - - a.listening <- true - - if err := a.tunnel.Listen(listener); err != nil { - // NOTICE: Tunnel'll only realize that it lost its connection to the ShellHub SSH when the next - // "keep-alive" connection fails. As a result, it will take this interval to reconnect to its server. 
- // - // It can be observed in the logs, that prints something like: - // 0000/00/00 00:00:00 revdial.Listener: error writing message to server: write tcp [::1]:00000->[::1]:80: write: broken pipe - log.WithError(err).WithFields(log.Fields{ - "namespace": namespace, - "hostname": tenantName, - "server_address": a.config.ServerAddress, - "ssh_server": sshEndpoint, - "sshid": sshid, - }).Error("Tunnel listener closed") - - listener.Close() // nolint:errcheck - a.listening <- false - - continue - } - - log.WithError(err).WithFields(log.Fields{ - "namespace": namespace, - "hostname": tenantName, - "server_address": a.config.ServerAddress, - "ssh_server": sshEndpoint, - "sshid": sshid, - }).Info("Tunnel listener closed") - - listener.Close() // nolint:errcheck - a.listening <- false - } - }() - - select { - case <-ctx.Done(): - if err := a.Close(); err != nil { - return err - } - - return nil - case <-done: - return nil - } -} - -// Ping sends an authtorization request to the server every ticker interval. -// -// If the durantion is 0, the default value set to it will be the 10 minutes. -// -// Ping will only sends its requests to the server if the agent is listening for connections. If the agent is not -// listening, the ping will be stopped. -func (a *Agent) Ping(ctx context.Context, durantion time.Duration) error { - if durantion == 0 { - durantion = 10 * time.Minute - } - - ticker := time.NewTicker(durantion) - <-a.listening // NOTE: wait for the first connection to start to ping the server. 
- - for { - a.mux.RLock() - if a.closed { - a.mux.RUnlock() - - return nil - } - a.mux.RUnlock() - - select { - case <-ctx.Done(): - log.WithFields(log.Fields{ - "version": AgentVersion, - "tenant_id": a.authData.Namespace, - "server_address": a.config.ServerAddress, - }).Debug("stopped pinging server due to context cancellation") - - return nil - case ok := <-a.listening: - if ok { - log.WithFields(log.Fields{ - "version": AgentVersion, - "tenant_id": a.authData.Namespace, - "server_address": a.config.ServerAddress, - "timestamp": time.Now(), - }).Info("Restarted pinging server") - - ticker.Reset(durantion) - } else { - log.WithFields(log.Fields{ - "version": AgentVersion, - "tenant_id": a.authData.Namespace, - "server_address": a.config.ServerAddress, - "timestamp": time.Now(), - }).Info("Stopped pinging server due listener status") - - ticker.Stop() - } - case <-ticker.C: - var sessions []string - a.server.Sessions.Range(func(k, _ interface{}) bool { - sessions = append(sessions, k.(string)) - - return true - }) - - a.sessions = sessions - - if err := a.authorize(); err != nil { - a.server.SetDeviceName(a.authData.Name) - } - - log.WithFields(log.Fields{ - "version": AgentVersion, - "tenant_id": a.authData.Namespace, - "server_address": a.config.ServerAddress, - "name": a.authData.Name, - "hostname": a.config.PreferredHostname, - "identity": a.config.PreferredIdentity, - "timestamp": time.Now(), - }).Info("Ping") - } - } -} - -// CheckUpdate gets the ShellHub's server version. -func (a *Agent) CheckUpdate() (*semver.Version, error) { - info, err := a.cli.GetInfo(AgentVersion) - if err != nil { - return nil, err - } - - return semver.NewVersion(info.Version) -} - -// GetInfo gets the ShellHub's server information like version and endpoints, and updates the Agent's server's info. 
-func (a *Agent) GetInfo() (*models.Info, error) { - if a.serverInfo != nil { - return a.serverInfo, nil - } - - info, err := a.cli.GetInfo(AgentVersion) - if err != nil { - return nil, err - } - - a.serverInfo = info - - return info, nil -} diff --git a/pkg/agent/agent_test.go b/pkg/agent/agent_test.go deleted file mode 100644 index 6c560b7c66f..00000000000 --- a/pkg/agent/agent_test.go +++ /dev/null @@ -1,19 +0,0 @@ -package agent - -func ExampleNewAgentWithConfig() { - _, err := NewAgentWithConfig(&Config{ - ServerAddress: "http://localhost:80", - TenantID: "00000000-0000-4000-0000-000000000000", - PrivateKey: "./shellhub.key", - }, new(HostMode)) - if err != nil { - panic(err) - } -} - -func ExampleNewAgent() { - _, err := NewAgent("http://localhost:80", "00000000-0000-4000-0000-000000000000", "./shellhub.key", new(HostMode)) - if err != nil { - panic(err) - } -} diff --git a/pkg/agent/connector/connector.go b/pkg/agent/connector/connector.go deleted file mode 100644 index ff0f16cd194..00000000000 --- a/pkg/agent/connector/connector.go +++ /dev/null @@ -1,38 +0,0 @@ -package connector - -import ( - "context" -) - -// ConnectorVersion stores the version of the ShellHub Instane that is running the connector. -// It is used in the ShellHub Agents initialized by the connector when a container is started. -var ConnectorVersion string - -// Container is a struct that represents a container that will be managed by the connector. -type Container struct { - // ID is the container ID. - ID string - // Name is the container name. - Name string - // ServerAddress is the ShellHub address of the server that the agent will connect to. - ServerAddress string - // Tenant is the tenant ID of the namespace that the agent belongs to. - Tenant string - // PrivateKey is the private key of the device. Specify the path to store the container private key. If not - // provided, the agent will generate a new one. This is required. 
- PrivateKey string - // Cancel is a function that is used to stop the goroutine that is running the agent for this container. - Cancel context.CancelFunc -} - -// Connector is an interface that defines the methods that a connector must implement. -type Connector interface { - // List lists all containers running on the host. - List(ctx context.Context) ([]Container, error) - // Start starts the agent for the container with the given ID. - Start(ctx context.Context, id string, name string) - // Stop stops the agent for the container with the given ID. - Stop(ctx context.Context, id string) - // Listen listens for events and starts or stops the agent for the container that was created or removed. - Listen(ctx context.Context) error -} diff --git a/pkg/agent/connector/docker.go b/pkg/agent/connector/docker.go deleted file mode 100644 index 358a72a8505..00000000000 --- a/pkg/agent/connector/docker.go +++ /dev/null @@ -1,268 +0,0 @@ -package connector - -import ( - "context" - "fmt" - "sync" - "time" - - "github.com/docker/docker/api/types" - "github.com/docker/docker/api/types/events" - dockerclient "github.com/docker/docker/client" - "github.com/shellhub-io/shellhub/pkg/agent" - log "github.com/sirupsen/logrus" -) - -var _ Connector = new(DockerConnector) - -// DockerConnector is a struct that represents a connector that uses Docker as the container runtime. -type DockerConnector struct { - mu sync.Mutex - // server is the ShellHub address of the server that the agent will connect to. - server string - // tenant is the tenant ID of the namespace that the agent belongs to. - tenant string - // cli is the Docker client. - cli *dockerclient.Client - // privateKeys is the path to the directory that contains the private keys for the containers. - privateKeys string - // cancels is a map that contains the cancel functions for each container. - // This is used to stop the agent for a container, marking as done its context and closing the agent. 
- cancels map[string]context.CancelFunc -} - -// NewDockerConnector creates a new [Connector] that uses Docker as the container runtime. -func NewDockerConnector(server string, tenant string, privateKey string) (Connector, error) { - cli, err := dockerclient.NewClientWithOpts(dockerclient.FromEnv, dockerclient.WithAPIVersionNegotiation()) - if err != nil { - return nil, err - } - - return &DockerConnector{ - server: server, - tenant: tenant, - cli: cli, - privateKeys: privateKey, - cancels: make(map[string]context.CancelFunc), - }, nil -} - -// events returns the docker events. -func (d *DockerConnector) events(ctx context.Context) (<-chan events.Message, <-chan error) { - return d.cli.Events(ctx, types.EventsOptions{}) -} - -func (d *DockerConnector) List(ctx context.Context) ([]Container, error) { - containers, err := d.cli.ContainerList(ctx, types.ContainerListOptions{}) - if err != nil { - return nil, err - } - - list := make([]Container, len(containers)) - for i, container := range containers { - list[i].ID = container.ID - - name, err := d.getContainerNameFromID(ctx, container.ID) - if err != nil { - return nil, err - } - - list[i].Name = name - } - - return list, nil -} - -// Start starts the agent for the container with the given ID. -func (d *DockerConnector) Start(ctx context.Context, id string, name string) { - id = id[:12] - - d.mu.Lock() - ctx, d.cancels[id] = context.WithCancel(ctx) - d.mu.Unlock() - - privateKey := fmt.Sprintf("%s/%s.key", d.privateKeys, id) - go initContainerAgent(ctx, d.cli, Container{ - ID: id, - Name: name, - ServerAddress: d.server, - Tenant: d.tenant, - PrivateKey: privateKey, - Cancel: d.cancels[id], - }) -} - -// Stop stops the agent for the container with the given ID. 
-func (d *DockerConnector) Stop(_ context.Context, id string) { - id = id[:12] - - d.mu.Lock() - defer d.mu.Unlock() - - cancel, ok := d.cancels[id] - if ok { - cancel() - delete(d.cancels, id) - } -} - -func (d *DockerConnector) getContainerNameFromID(ctx context.Context, id string) (string, error) { - container, err := d.cli.ContainerInspect(ctx, id) - if err != nil { - return "", err - } - - // NOTICE: It removes the first character on container's name that is a `/`. - return container.Name[1:], nil -} - -// Listen listens for events and starts or stops the agent for the containers. -func (d *DockerConnector) Listen(ctx context.Context) error { - containers, err := d.List(ctx) - if err != nil { - return err - } - - for _, container := range containers { - d.Start(ctx, container.ID, container.Name) - } - - events, errs := d.events(ctx) - for { - select { - case <-ctx.Done(): - return nil - case err := <-errs: - return err - case container := <-events: - // NOTICE: "start" and "die" Docker's events are call every time a new container start or stop, - // independently how the command was run. For example, if a container was started with `docker run -d`, the - // "start" event will be called, but if the same container was started with `docker start `, - // the "start" event will be called too. The same happens with the "die" event. - switch container.Action { - case "start": - name, err := d.getContainerNameFromID(ctx, container.ID) - if err != nil { - return err - } - - d.Start(ctx, container.ID, name) - case "die": - d.Stop(ctx, container.ID) - } - } - } -} - -// initContainerAgent initializes the agent for a container. 
-func initContainerAgent(ctx context.Context, cli *dockerclient.Client, container Container) { - agent.AgentPlatform = "connector" - agent.AgentVersion = ConnectorVersion - - cfg := &agent.Config{ - ServerAddress: container.ServerAddress, - TenantID: container.Tenant, - PrivateKey: container.PrivateKey, - PreferredIdentity: container.ID, - PreferredHostname: container.Name, - KeepAliveInterval: 30, - } - - log.WithFields(log.Fields{ - "id": container.ID, - "identity": cfg.PreferredIdentity, - "hostname": cfg.PreferredHostname, - "tenant_id": cfg.TenantID, - "server_address": cfg.ServerAddress, - "timestamp": time.Now(), - "version": agent.AgentVersion, - }).Info("Connector container started") - - mode, err := agent.NewConnectorMode(cli, container.ID) - if err != nil { - log.WithError(err).WithFields(log.Fields{ - "id": container.ID, - "identity": cfg.PreferredIdentity, - "hostname": cfg.PreferredHostname, - "tenant_id": cfg.TenantID, - "server_address": cfg.ServerAddress, - "timestamp": time.Now(), - "version": agent.AgentVersion, - }).Fatal("Failed to create connector mode") - } - - ag, err := agent.NewAgentWithConfig(cfg, mode) - if err != nil { - log.WithError(err).WithFields(log.Fields{ - "id": container.ID, - "configuration": cfg, - "version": agent.AgentVersion, - }).Fatal("Failed to create agent") - } - - if err := ag.Initialize(); err != nil { - log.WithError(err).WithFields(log.Fields{ - "id": container.ID, - "configuration": cfg, - "version": agent.AgentVersion, - }).Fatal("Failed to initialize agent") - } - - go func() { - if err := ag.Ping(ctx, 0); err != nil { - log.WithError(err).WithFields(log.Fields{ - "id": container.ID, - "identity": cfg.PreferredIdentity, - "hostname": cfg.PreferredHostname, - "tenant_id": cfg.TenantID, - "server_address": cfg.ServerAddress, - "timestamp": time.Now(), - "version": agent.AgentVersion, - }).Fatal("Failed to ping server") - } - - log.WithFields(log.Fields{ - "id": container.ID, - "identity": cfg.PreferredIdentity, - 
"hostname": cfg.PreferredHostname, - "tenant_id": cfg.TenantID, - "server_address": cfg.ServerAddress, - "timestamp": time.Now(), - "version": agent.AgentVersion, - }).Info("Stopped pinging server") - }() - - log.WithFields(log.Fields{ - "id": container.ID, - "identity": cfg.PreferredIdentity, - "hostname": cfg.PreferredHostname, - "tenant_id": cfg.TenantID, - "server_address": cfg.ServerAddress, - "timestamp": time.Now(), - "version": agent.AgentVersion, - }).Info("Listening for connections") - - // NOTICE(r): listing for connection and wait for a channel message to close the agent. It will receives - // this mensagem when something out of this goroutine send a `done`, what will cause the agent closes - // and no more connection to be allowed until it be started again. - if err := ag.Listen(ctx); err != nil { - log.WithError(err).WithFields(log.Fields{ - "id": container.ID, - "identity": cfg.PreferredIdentity, - "hostname": cfg.PreferredHostname, - "tenant_id": cfg.TenantID, - "server_address": cfg.ServerAddress, - "timestamp": time.Now(), - "version": agent.AgentVersion, - }).Fatal("Failed to listen for connections") - } - - log.WithFields(log.Fields{ - "id": container.ID, - "identity": cfg.PreferredIdentity, - "hostname": cfg.PreferredHostname, - "tenant_id": cfg.TenantID, - "server_address": cfg.ServerAddress, - "version": agent.AgentVersion, - }).Info("Connector container done") -} diff --git a/pkg/agent/modes.go b/pkg/agent/modes.go deleted file mode 100644 index 7faa150cd7a..00000000000 --- a/pkg/agent/modes.go +++ /dev/null @@ -1,123 +0,0 @@ -package agent - -import ( - "context" - "os/exec" - - dockerclient "github.com/docker/docker/client" - "github.com/shellhub-io/shellhub/pkg/agent/pkg/sysinfo" - "github.com/shellhub-io/shellhub/pkg/agent/server" - "github.com/shellhub-io/shellhub/pkg/agent/server/modes/connector" - "github.com/shellhub-io/shellhub/pkg/agent/server/modes/host" -) - -type Info struct { - ID string - Name string -} - -// Mode is the Agent 
execution mode. -// -// The Agent can be executed in two different modes: `Host` and `Connector`. -// The `Host` mode is the default one, where the agent will listen for incoming connections and use the host device as -// source of any information needed to start itself. When running in `Connector` mode, it uses the Docker engine as this -// source. -// -// Check [HostMode] and [ConnectorMode] for more information. -type Mode interface { - // Serve prepares the Agent for listening, setting up the SSH server, its modes and values on Agent's. - Serve(agent *Agent) - // GetInfo gets information about Agent according to Agent's mode. - // - // When Agent is running on [HostMode], the info got is from the system where the Agent is running, but when running - // in [ConnectorMode], the data is retrieved from Docker Engine. - GetInfo() (*Info, error) -} - -// ModeHost is the Agent execution mode for `Host`. -// -// The host mode is the default mode one, and turns the host machine into a ShellHub's Agent. The host is -// responsible for the SSH server, authentication and authorization, `/etc/passwd`, `/etc/shadow`, and etc. -type HostMode struct{} - -var _ Mode = new(HostMode) - -func (m *HostMode) Serve(agent *Agent) { - agent.server = server.NewServer( - agent.cli, - agent.authData, - agent.config.PrivateKey, - agent.config.KeepAliveInterval, - agent.config.SingleUserPassword, - &host.Mode{ - Authenticator: *host.NewAuthenticator(agent.cli, agent.authData, agent.config.SingleUserPassword, &agent.authData.Name), - Sessioner: *host.NewSessioner(&agent.authData.Name, make(map[string]*exec.Cmd)), - }, - ) - - agent.server.SetDeviceName(agent.authData.Name) -} - -func (m *HostMode) GetInfo() (*Info, error) { - osrelease, err := sysinfo.GetOSRelease() - if err != nil { - return nil, err - } - - return &Info{ - ID: osrelease.ID, - Name: osrelease.Name, - }, nil -} - -// ModeConnector is the Agent execution mode for `Connector`. 
-// -// The `Connector` mode is used to turn a container inside a host into a single device ShellHub's Agent. The host is -// responsible for the SSH server, but the authentication and authorization is made by either the conainer -// internals, `passwd` or `shadow`, or by the ShellHub API. -type ConnectorMode struct { - cli *dockerclient.Client - identity string -} - -func NewConnectorMode(cli *dockerclient.Client, identity string) (Mode, error) { - return &ConnectorMode{ - cli: cli, - identity: identity, - }, nil -} - -var _ Mode = new(ConnectorMode) - -func (m *ConnectorMode) Serve(agent *Agent) { - // NOTICE: When the agent is running in `Connector` mode, we need to identify the container ID to maintain the - // communication between the server and the agent when the container name on the host changes. This information is - // saved inside the device's identity, avoiding significant changes in the current state of the agent. - // TODO: Evaluate if we can use another field than "MAC" to store the container ID. 
- agent.server = server.NewServer( - agent.cli, - agent.authData, - agent.config.PrivateKey, - agent.config.KeepAliveInterval, - agent.config.SingleUserPassword, - &connector.Mode{ - Authenticator: *connector.NewAuthenticator(agent.cli, m.cli, agent.authData, &agent.Identity.MAC), - Sessioner: *connector.NewSessioner(&agent.Identity.MAC, m.cli), - }, - ) - - agent.server.SetContainerID(agent.Identity.MAC) - agent.server.SetDeviceName(agent.authData.Name) -} - -func (m *ConnectorMode) GetInfo() (*Info, error) { - info, err := m.cli.ContainerInspect(context.Background(), m.identity) - if err != nil { - return nil, err - } - - return &Info{ - ID: "docker", - Name: info.Config.Image, - }, nil -} diff --git a/pkg/agent/pkg/osauth/auth.go b/pkg/agent/pkg/osauth/auth.go deleted file mode 100644 index 70de24f1f8e..00000000000 --- a/pkg/agent/pkg/osauth/auth.go +++ /dev/null @@ -1,221 +0,0 @@ -package osauth - -import ( - "bufio" - "errors" - "fmt" - "io" - "os" - "strconv" - "strings" - - "github.com/GehirnInc/crypt" - _ "github.com/GehirnInc/crypt/md5_crypt" // GehirnInc/crypt uses blank imports for crypto subpackages - _ "github.com/GehirnInc/crypt/sha256_crypt" // GehirnInc/crypt uses blank imports for crypto subpackages - _ "github.com/GehirnInc/crypt/sha512_crypt" // GehirnInc/crypt uses blank imports for crypto subpackages - "github.com/shellhub-io/shellhub/pkg/agent/pkg/yescrypt" - "github.com/sirupsen/logrus" -) - -//go:generate mockery --name=OSAuther --filename=osauther.go -type OSAuther interface { - AuthUser(username, password string) bool - AuthUserFromShadow(username, password string, shadow io.Reader) bool - VerifyPasswordHash(hash, password string) bool - LookupUser(username string) *User - LookupUserFromPasswd(username string, passwd io.Reader) (*User, error) -} - -type OSAuth struct{} - -func (l *OSAuth) AuthUser(username, password string) bool { - shadow, err := os.Open(DefaultShadowFilename) - if err != nil { - logrus.WithError(err).Error("Could not 
open /etc/shadow") - - return false - } - defer shadow.Close() - - if ok := l.AuthUserFromShadow(username, password, shadow); !ok { - logrus.WithFields(logrus.Fields{ - "username": username, - }).Debug("Failed to authenticate user from shadow file") - - return false - } - - return true -} - -// AuthUserFromShadow checks if the given username and password are valid for the given shadow file. -func (l *OSAuth) AuthUserFromShadow(username string, password string, shadow io.Reader) bool { - entries, err := parseShadowReader(shadow) - if err != nil { - logrus.WithError(err).Debug("Error parsing shadow file") - - return false - } - - entry, ok := entries[username] - if !ok { - logrus.WithFields(logrus.Fields{ - "username": username, - }).Error("User not found") - - return false - } - - return l.VerifyPasswordHash(entry.Password, password) -} - -func (l *OSAuth) VerifyPasswordHash(hash, password string) bool { - if hash == "" { - logrus.Error("Password entry is empty") - - return false - } - - // If hash algorithm is yescrypt verify by ourselves, otherwise let's try crypt package - if strings.HasPrefix(hash, "$y$") { - return yescrypt.Verify(password, hash) - } - - if ok := crypt.IsHashSupported(hash); !ok { - logrus.Error("The crypto algorithm is not supported") - - return false - } - - crypt := crypt.NewFromHash(hash) - if crypt == nil { - logrus.Error("Could not detect password crypto algorithm from shadow entry") - - return false - } - - if err := crypt.Verify(hash, []byte(password)); err != nil { - logrus.WithError(err).Debug("Error verifying password hash") - - return false - } - - return true -} - -// ErrUserNotFound is returned when the user is not found in the passwd file. -var ErrUserNotFound = errors.New("user not found") - -// LookupUserFromPasswd reads the passwd file from the given reader and returns the user, if found. -// TODO: Use this function inside the LookupUser. 
-func (l *OSAuth) LookupUserFromPasswd(username string, passwd io.Reader) (*User, error) { - entries, err := parsePasswdReader(passwd) - if err != nil { - logrus.WithError(err).Error("Error parsing passwd file") - - return nil, err - } - - user, found := entries[username] - if !found { - logrus.WithFields(logrus.Fields{ - "username": username, - }).Error("User not found in passwd file") - - return nil, ErrUserNotFound - } - - return &user, nil -} - -func (l *OSAuth) LookupUser(username string) *User { - if os.Geteuid() != 0 { - return singleUser() - } - - passwd, err := os.Open(DefaultPasswdFilename) - if err != nil { - logrus.Errorf("Could not open %s", DefaultPasswdFilename) - - return nil - } - defer passwd.Close() - - user, err := l.LookupUserFromPasswd(username, passwd) - if err != nil { - return nil - } - - return user -} - -var DefaultShadowFilename = "/etc/shadow" - -type ShadowEntry struct { - Username string // Login name - Password string // Hashed password - Lastchanged int // Days since Jan 1, 1970 that password was last changed - Minimum int // The minimum number of days required between password changes i.e. the number of days left before the user is allowed to change his/her password - Maximum int // The maximum number of days the password is valid (after that user is forced to change his/her password) - Warn int // The number of days before password is to expire that user is warned that his/her password must be changed - Inactive int // The number of days after password expires that account is disabled - Expire int // Days since Jan 1, 1970 that account is disabled i.e. an absolute date specifying when the login may no longer be used. 
-} - -func parseShadowReader(r io.Reader) (map[string]ShadowEntry, error) { - lines := bufio.NewReader(r) - entries := make(map[string]ShadowEntry) - - for { - line, _, err := lines.ReadLine() - if err != nil { - break - } - - if len(line) == 0 || strings.HasPrefix(string(line), "#") { - continue - } - - entry, err := parseShadowLine(string(line)) - if err != nil { - return nil, err - } - - entries[entry.Username] = entry - } - - return entries, nil //nolint:nilerr -} - -func parseShadowLine(line string) (ShadowEntry, error) { - result := ShadowEntry{} - parts := strings.Split(strings.TrimSpace(line), ":") - if len(parts) != 9 { - return result, fmt.Errorf("shadow line had wrong number of parts %d != 9", len(parts)) - } - - result.Username = strings.TrimSpace(parts[0]) - result.Password = strings.TrimSpace(parts[1]) - - result.Lastchanged = parseIntString(parts[2]) - result.Minimum = parseIntString(parts[3]) - result.Maximum = parseIntString(parts[4]) - result.Warn = parseIntString(parts[5]) - result.Inactive = parseIntString(parts[6]) - result.Expire = parseIntString(parts[7]) - - return result, nil -} - -func parseIntString(value string) int { - if value != "" { - return 0 - } - - number, err := strconv.Atoi(strings.TrimSpace(value)) - if err != nil { - return 0 - } - - return number -} diff --git a/pkg/agent/pkg/osauth/auth_test.go b/pkg/agent/pkg/osauth/auth_test.go deleted file mode 100644 index 9423c0715fc..00000000000 --- a/pkg/agent/pkg/osauth/auth_test.go +++ /dev/null @@ -1,34 +0,0 @@ -package osauth - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestVerifyPasswordHashPass(t *testing.T) { - hashPassword := "$6$CMWxpgkq.ZosUW8N$gN/MkheCdS9SsPrFS6oOd/k.TMvY2KHztJE5pDMRdN35zr00dyxQr3pYGM4rtPPduUIrEFCwuB7oVgzDbiMfN." 
//nolint:gosec - passwd := "123" - - result := new(OSAuth).VerifyPasswordHash(hashPassword, passwd) - - assert.True(t, result) -} - -func TestVerifyPasswordHashFail(t *testing.T) { - hashPassword := "$6$CMWxpgkq.ZosUW8N$gN/MkheCdS9SsPrFS6oOd/k.TMvY2KHztJE5pDMRdN35zr00dyxQr3pYGM4rtPPduUIrEFCwuB7oVgzDbiMfN." //nolint:gosec - passwd := "test" - - result := new(OSAuth).VerifyPasswordHash(hashPassword, passwd) - - assert.False(t, result) -} - -func TestVerifyPasswordHashMD5Pass(t *testing.T) { - hashPassword := "$1$YW4a91HG$31CtH9bzW/oyJ1VOD.H/d/" //nolint:gosec - passwd := "test" - - result := new(OSAuth).VerifyPasswordHash(hashPassword, passwd) - - assert.True(t, result) -} diff --git a/pkg/agent/pkg/osauth/mocks/osauther.go b/pkg/agent/pkg/osauth/mocks/osauther.go deleted file mode 100644 index 1930c16a6a1..00000000000 --- a/pkg/agent/pkg/osauth/mocks/osauther.go +++ /dev/null @@ -1,114 +0,0 @@ -// Code generated by mockery v2.20.0. DO NOT EDIT. - -package mocks - -import ( - io "io" - - osauth "github.com/shellhub-io/shellhub/pkg/agent/pkg/osauth" - mock "github.com/stretchr/testify/mock" -) - -// OSAuther is an autogenerated mock type for the OSAuther type -type OSAuther struct { - mock.Mock -} - -// AuthUser provides a mock function with given fields: username, password -func (_m *OSAuther) AuthUser(username string, password string) bool { - ret := _m.Called(username, password) - - var r0 bool - if rf, ok := ret.Get(0).(func(string, string) bool); ok { - r0 = rf(username, password) - } else { - r0 = ret.Get(0).(bool) - } - - return r0 -} - -// AuthUserFromShadow provides a mock function with given fields: username, password, shadow -func (_m *OSAuther) AuthUserFromShadow(username string, password string, shadow io.Reader) bool { - ret := _m.Called(username, password, shadow) - - var r0 bool - if rf, ok := ret.Get(0).(func(string, string, io.Reader) bool); ok { - r0 = rf(username, password, shadow) - } else { - r0 = ret.Get(0).(bool) - } - - return r0 -} - -// 
LookupUser provides a mock function with given fields: username -func (_m *OSAuther) LookupUser(username string) *osauth.User { - ret := _m.Called(username) - - var r0 *osauth.User - if rf, ok := ret.Get(0).(func(string) *osauth.User); ok { - r0 = rf(username) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*osauth.User) - } - } - - return r0 -} - -// LookupUserFromPasswd provides a mock function with given fields: username, passwd -func (_m *OSAuther) LookupUserFromPasswd(username string, passwd io.Reader) (*osauth.User, error) { - ret := _m.Called(username, passwd) - - var r0 *osauth.User - var r1 error - if rf, ok := ret.Get(0).(func(string, io.Reader) (*osauth.User, error)); ok { - return rf(username, passwd) - } - if rf, ok := ret.Get(0).(func(string, io.Reader) *osauth.User); ok { - r0 = rf(username, passwd) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*osauth.User) - } - } - - if rf, ok := ret.Get(1).(func(string, io.Reader) error); ok { - r1 = rf(username, passwd) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// VerifyPasswordHash provides a mock function with given fields: hash, password -func (_m *OSAuther) VerifyPasswordHash(hash string, password string) bool { - ret := _m.Called(hash, password) - - var r0 bool - if rf, ok := ret.Get(0).(func(string, string) bool); ok { - r0 = rf(hash, password) - } else { - r0 = ret.Get(0).(bool) - } - - return r0 -} - -type mockConstructorTestingTNewOSAuther interface { - mock.TestingT - Cleanup(func()) -} - -// NewOSAuther creates a new instance of OSAuther. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. 
-func NewOSAuther(t mockConstructorTestingTNewOSAuther) *OSAuther { - mock := &OSAuther{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/pkg/agent/pkg/osauth/user.go b/pkg/agent/pkg/osauth/user.go deleted file mode 100644 index 5ab88196334..00000000000 --- a/pkg/agent/pkg/osauth/user.go +++ /dev/null @@ -1,100 +0,0 @@ -package osauth - -import ( - "bufio" - "fmt" - "io" - "os" - "os/user" - "strconv" - "strings" -) - -var DefaultPasswdFilename = "/etc/passwd" - -type User struct { - UID uint32 - GID uint32 - Username string - Password string - Name string - HomeDir string - Shell string -} - -func singleUser() *User { - var uid, gid int - var username, name, homeDir, shell string - u, err := user.Current() - uid, _ = strconv.Atoi(os.Getenv("UID")) - homeDir = os.Getenv("HOME") - shell = os.Getenv("SHELL") - if err == nil { - uid, _ = strconv.Atoi(u.Uid) - gid, _ = strconv.Atoi(u.Gid) - username = u.Username - name = u.Name - homeDir = u.HomeDir - } - - return &User{ - UID: uint32(uid), - GID: uint32(gid), - Username: username, - Name: name, - HomeDir: homeDir, - Shell: shell, - } -} - -func parsePasswdReader(r io.Reader) (map[string]User, error) { - lines := bufio.NewReader(r) - entries := make(map[string]User) - for { - line, _, err := lines.ReadLine() - if err != nil { - break - } - - if len(line) == 0 || strings.HasPrefix(string(line), "#") { - continue - } - - entry, err := parsePasswdLine(string(line)) - if err != nil { - return nil, err - } - - entries[entry.Username] = entry - } - - return entries, nil //nolint:nilerr -} - -func parsePasswdLine(line string) (User, error) { - result := User{} - parts := strings.Split(strings.TrimSpace(line), ":") - if len(parts) != 7 { - return result, fmt.Errorf("passwd line had wrong number of parts %d != 7", len(parts)) - } - result.Username = strings.TrimSpace(parts[0]) - result.Password = strings.TrimSpace(parts[1]) - - uid, err := strconv.Atoi(parts[2]) - if err 
!= nil { - return result, fmt.Errorf("passwd line had badly formatted uid %s", parts[2]) - } - result.UID = uint32(uid) - - gid, err := strconv.Atoi(parts[3]) - if err != nil { - return result, fmt.Errorf("passwd line had badly formatted gid %s", parts[3]) - } - result.GID = uint32(gid) - - result.Name = strings.TrimSpace(parts[4]) - result.HomeDir = strings.TrimSpace(parts[5]) - result.Shell = strings.TrimSpace(parts[6]) - - return result, nil -} diff --git a/pkg/agent/pkg/tunnel/tunnel.go b/pkg/agent/pkg/tunnel/tunnel.go deleted file mode 100644 index a17d0549f8a..00000000000 --- a/pkg/agent/pkg/tunnel/tunnel.go +++ /dev/null @@ -1,98 +0,0 @@ -package tunnel - -import ( - "context" - "net" - "net/http" - - "github.com/labstack/echo/v4" - "github.com/shellhub-io/shellhub/pkg/revdial" -) - -type Tunnel struct { - router *echo.Echo - srv *http.Server - HTTPHandler func(e echo.Context) error - ConnHandler func(e echo.Context) error - CloseHandler func(e echo.Context) error -} - -type Builder struct { - tunnel *Tunnel -} - -func NewBuilder() *Builder { - return &Builder{ - tunnel: NewTunnel(), - } -} - -func (t *Builder) WithHTTPHandler(handler func(e echo.Context) error) *Builder { - t.tunnel.HTTPHandler = handler - - return t -} - -func (t *Builder) WithConnHandler(handler func(e echo.Context) error) *Builder { - t.tunnel.ConnHandler = handler - - return t -} - -func (t *Builder) WithCloseHandler(handler func(e echo.Context) error) *Builder { - t.tunnel.CloseHandler = handler - - return t -} - -func (t *Builder) Build() *Tunnel { - return t.tunnel -} - -func NewTunnel() *Tunnel { - e := echo.New() - - t := &Tunnel{ - router: e, - srv: &http.Server{ - Handler: e, - ConnContext: func(ctx context.Context, c net.Conn) context.Context { - return context.WithValue(ctx, "http-conn", c) //nolint:revive - }, - }, - HTTPHandler: func(e echo.Context) error { - panic("HTTPHandler can not be nil") - }, - ConnHandler: func(e echo.Context) error { - panic("connHandler can not be 
nil") - }, - CloseHandler: func(e echo.Context) error { - panic("closeHandler can not be nil") - }, - } - e.GET("/ssh/http", func(e echo.Context) error { - return t.HTTPHandler(e) - }) - e.GET("/ssh/:id", func(e echo.Context) error { - return t.ConnHandler(e) - }) - e.GET("/ssh/close/:id", func(e echo.Context) error { - return t.CloseHandler(e) - }) - - return t -} - -// Listen to reverse listener. -func (t *Tunnel) Listen(l *revdial.Listener) error { - return t.srv.Serve(l) -} - -// Close closes the tunnel. -func (t *Tunnel) Close() error { - if err := t.router.Close(); err != nil { - return err - } - - return t.srv.Close() -} diff --git a/pkg/agent/pkg/yescrypt/yescript.go b/pkg/agent/pkg/yescrypt/yescript.go deleted file mode 100644 index 4d5fa229c37..00000000000 --- a/pkg/agent/pkg/yescrypt/yescript.go +++ /dev/null @@ -1,25 +0,0 @@ -//go:build !without_cgo -// +build !without_cgo - -package yescrypt - -/* -#cgo LDFLAGS: -lcrypt -#include -#include -*/ -import "C" -import "unsafe" - -// Verify verifies a yescrypt hash against a given key. 
-func Verify(key, hash string) bool { - ckey := C.CString(key) - chash := C.CString(hash) - - out := C.crypt(ckey, chash) - - C.free(unsafe.Pointer(ckey)) - C.free(unsafe.Pointer(chash)) - - return C.GoString(out) == hash -} diff --git a/pkg/agent/server/modes/connector/authenticator.go b/pkg/agent/server/modes/connector/authenticator.go deleted file mode 100644 index c0bff5f242f..00000000000 --- a/pkg/agent/server/modes/connector/authenticator.go +++ /dev/null @@ -1,290 +0,0 @@ -package connector - -import ( - "archive/tar" - "context" - "crypto" - "crypto/rsa" - "crypto/sha256" - "encoding/base64" - "encoding/json" - "io" - - dockerclient "github.com/docker/docker/client" - gliderssh "github.com/gliderlabs/ssh" - "github.com/shellhub-io/shellhub/pkg/agent/pkg/osauth" - "github.com/shellhub-io/shellhub/pkg/agent/server/modes" - "github.com/shellhub-io/shellhub/pkg/api/client" - "github.com/shellhub-io/shellhub/pkg/models" - log "github.com/sirupsen/logrus" - gossh "golang.org/x/crypto/ssh" -) - -// NOTICE: Ensures the Authenticator interface is implemented. -var _ modes.Authenticator = (*Authenticator)(nil) - -// Authenticator implements the Authenticator interface when the server is running in connector mode. -type Authenticator struct { - // api is a client to communicate with the ShellHub's API. - api client.Client - // authData is the authentication data received from the API to authenticate the device. - authData *models.DeviceAuthResponse - // container is the device name. - // - // NOTICE: Uses a pointer for later assignment. - container *string - // docker is a client to communicate with the Docker's API. - docker dockerclient.APIClient - // osauth is an instance of the OSAuth interface to authenticate the user on the Operating System. - osauth osauth.OSAuther -} - -// NewAuthenticator creates a new instance of Authenticator for the connector mode. 
-func NewAuthenticator(api client.Client, docker dockerclient.APIClient, authData *models.DeviceAuthResponse, container *string) *Authenticator { - return &Authenticator{ - api: api, - authData: authData, - container: container, - docker: docker, - osauth: new(osauth.OSAuth), - } -} - -// getPasswd return a [io.Reader] for the container's passwd file. -func getPasswd(ctx context.Context, cli dockerclient.APIClient, container string) (io.Reader, error) { - passwdTar, _, err := cli.CopyFromContainer(ctx, container, "/etc/passwd") - if err != nil { - return nil, err - } - - passwd := tar.NewReader(passwdTar) - if _, err := passwd.Next(); err != nil { - return nil, err - } - - return passwd, nil -} - -// Password handles the server's SSH password authentication when server is running in connector mode. -func (a *Authenticator) Password(ctx gliderssh.Context, username string, password string) bool { - passwd, err := getPasswd(ctx, a.docker, *a.container) - if err != nil { - log.WithFields( - log.Fields{ - "container": *a.container, - "username": username, - }, - ).WithError(err).Error("failed to get the passwd file from container") - - return false - } - - user, err := a.osauth.LookupUserFromPasswd(username, passwd) - if err != nil { - log.WithFields( - log.Fields{ - "container": *a.container, - "username": username, - }, - ).WithError(err).Error("failed to lookup for the user on passwd file") - - return false - } - - if user.Password == "" { - log.WithFields( - log.Fields{ - "container": *a.container, - "username": username, - }, - ).WithError(err).Error("user passwd is empty, so the authentication via password is blocked") - - // NOTICE(r): when the user doesn't have password, we block the login. 
- return false - } - - shadowTar, _, err := a.docker.CopyFromContainer(ctx, *a.container, "/etc/shadow") - if err != nil { - log.WithFields( - log.Fields{ - "container": *a.container, - "username": username, - }, - ).WithError(err).Error("failed to get the shadow file from the container") - - return false - } - - shadow := tar.NewReader(shadowTar) - if _, err := shadow.Next(); err != nil { - log.WithFields( - log.Fields{ - "container": *a.container, - "username": username, - }, - ).WithError(err).Error("failed to get the shadow file from the tar") - - return false - } - - if !a.osauth.AuthUserFromShadow(username, password, shadow) { - log.WithFields( - log.Fields{ - "container": *a.container, - "username": username, - }, - ).WithError(err).Error("failed to authenticate the user on the device") - - return false - } - - // NOTICE: set the osauth.User to the context to be obtained later on. - ctx.SetValue("user", user) - - log.WithFields( - log.Fields{ - "container": *a.container, - "username": username, - }, - ).Info("using password authentication") - - return true -} - -// PublicKey handles the server's SSH public key authentication when server is running in connector mode. 
-func (a *Authenticator) PublicKey(ctx gliderssh.Context, username string, key gliderssh.PublicKey) bool { - passwd, err := getPasswd(ctx, a.docker, *a.container) - if err != nil { - log.WithFields( - log.Fields{ - "container": *a.container, - "username": username, - }, - ).WithError(err).Error("failed to get the passwd file from container") - - return false - } - - user, err := a.osauth.LookupUserFromPasswd(username, passwd) - if err != nil { - log.WithFields( - log.Fields{ - "container": *a.container, - "username": username, - }, - ).WithError(err).Error("failed to lookup for the user on passwd file") - - return false - } - - type Signature struct { - Username string - Namespace string - } - - sig := &Signature{ - Username: username, - Namespace: *a.container, - } - - sigBytes, err := json.Marshal(sig) - if err != nil { - log.WithFields( - log.Fields{ - "container": *a.container, - "username": username, - }, - ).WithError(err).Error("failed to marshal signature") - - return false - } - - sigHash := sha256.Sum256(sigBytes) - - fingerprint := gossh.FingerprintLegacyMD5(key) - res, err := a.api.AuthPublicKey(&models.PublicKeyAuthRequest{ - Fingerprint: fingerprint, - Data: string(sigBytes), - }, a.authData.Token) - if err != nil { - log.WithFields( - log.Fields{ - "container": *a.container, - "username": username, - "fingerprint": fingerprint, - }, - ).WithError(err).Error("failed to authenticate the user via public key") - - return false - } - - digest, err := base64.StdEncoding.DecodeString(res.Signature) - if err != nil { - if err != nil { - log.WithFields( - log.Fields{ - "container": *a.container, - "username": username, - "fingerprint": fingerprint, - }, - ).WithError(err).Error("failed to decode the signature") - - return false - } - - return false - } - - cryptoKey, ok := key.(gossh.CryptoPublicKey) - if !ok { - log.WithFields( - log.Fields{ - "container": *a.container, - "username": username, - "fingerprint": fingerprint, - }, - ).Error("failed to get the 
crypto public key") - - return false - } - - pubCrypto := cryptoKey.CryptoPublicKey() - - pubKey, ok := pubCrypto.(*rsa.PublicKey) - if !ok { - log.WithFields( - log.Fields{ - "container": *a.container, - "username": username, - "fingerprint": fingerprint, - }, - ).Error("failed to convert the crypto public key") - - return false - } - - if err = rsa.VerifyPKCS1v15(pubKey, crypto.SHA256, sigHash[:], digest); err != nil { - log.WithFields( - log.Fields{ - "container": *a.container, - "username": username, - "fingerprint": fingerprint, - }, - ).WithError(err).Error("failed to verify the signature") - - return false - } - - // NOTICE: set the osauth.User to the context to be obtained later on. - ctx.SetValue("user", user) - - log.WithFields( - log.Fields{ - "container": *a.container, - "username": username, - "fingerprint": fingerprint, - }, - ).Info("using public key authentication") - - return true -} diff --git a/pkg/agent/server/modes/connector/connector.go b/pkg/agent/server/modes/connector/connector.go deleted file mode 100644 index 143928c2301..00000000000 --- a/pkg/agent/server/modes/connector/connector.go +++ /dev/null @@ -1,88 +0,0 @@ -// Package connector defines methods for authentication and sessions handles to SSH when it is running in connector mode. -// -// Connector mode means that the SSH's server runs in the host machine, but redirect the IO to a specific docker -// container, maning its authentication through the container's "/etc/passwd", "/etc/shadow" and etc. 
-package connector - -import ( - "context" - - "github.com/docker/docker/api/types" - dockerclient "github.com/docker/docker/client" - "github.com/docker/docker/pkg/process" - "github.com/shellhub-io/shellhub/pkg/agent/pkg/osauth" -) - -type Mode struct { - Authenticator - Sessioner -} - -func attachShellToContainer(ctx context.Context, cli dockerclient.APIClient, container string, user *osauth.User, size [2]uint) (*types.HijackedResponse, string, error) { - return attachToContainer(ctx, cli, "shell", container, user, true, []string{}, size) -} - -func attachExecToContainer(ctx context.Context, cli dockerclient.APIClient, container string, user *osauth.User, isPty bool, commands []string, size [2]uint) (*types.HijackedResponse, string, error) { - return attachToContainer(ctx, cli, "exec", container, user, isPty, commands, size) -} - -func attachHereDocToContainer(ctx context.Context, cli dockerclient.APIClient, container string, user *osauth.User, size [2]uint) (*types.HijackedResponse, string, error) { - return attachToContainer(ctx, cli, "heredoc", container, user, false, []string{}, size) -} - -func attachToContainer(ctx context.Context, cli dockerclient.APIClient, requestType string, container string, user *osauth.User, isPty bool, commands []string, size [2]uint) (*types.HijackedResponse, string, error) { - if user.Shell == "" { - user.Shell = "/bin/sh" - } - - id, err := cli.ContainerExecCreate(ctx, container, types.ExecConfig{ - User: user.Username, - Tty: isPty, - ConsoleSize: &size, - AttachStdin: true, - AttachStdout: true, - AttachStderr: true, - Cmd: func() []string { - switch requestType { - case "shell": - return []string{user.Shell} - case "exec": - // NOTE(r): when the exec session's has `-t` or `-tt` flag, the command must be executed into a tty/pty. - // the Shell's `-c` flag is used to do this. - if isPty { - return append([]string{user.Shell, "-c"}, commands...) 
- } - - return commands - case "heredoc": - return []string{user.Shell} - default: - return []string{} - } - }(), - }) - if err != nil { - return nil, "", err - } - - res, err := cli.ContainerExecAttach(ctx, id.ID, types.ExecStartCheck{ - Tty: isPty, - ConsoleSize: &size, - }) - - return &res, id.ID, err -} - -func exitCodeExecFromContainer(cli dockerclient.APIClient, id string) (int, error) { - inspected, err := cli.ContainerExecInspect(context.Background(), id) - if err != nil { - return -1, err - } - - if inspected.Running { - // NOTICE: when a process is running after the exec command, it is necessary to kill it. - return 0, process.Kill(inspected.Pid) - } - - return inspected.ExitCode, nil -} diff --git a/pkg/agent/server/modes/connector/sessioner.go b/pkg/agent/server/modes/connector/sessioner.go deleted file mode 100644 index 20e7f3d7de7..00000000000 --- a/pkg/agent/server/modes/connector/sessioner.go +++ /dev/null @@ -1,212 +0,0 @@ -package connector - -import ( - "errors" - "fmt" - "io" - "sync" - - dockerclient "github.com/docker/docker/client" - "github.com/docker/docker/pkg/stdcopy" - gliderssh "github.com/gliderlabs/ssh" - "github.com/shellhub-io/shellhub/pkg/agent/pkg/osauth" - "github.com/shellhub-io/shellhub/pkg/agent/server/modes" -) - -var ErrUserNotFound = errors.New("user not found on context") - -// NOTICE: Ensures the Sessioner interface is implemented. -var _ modes.Sessioner = (*Sessioner)(nil) - -// Sessioner implements the Sessioner interface when the server is running in connector mode. -type Sessioner struct { - // container is the device name. - // - // NOTICE: It's a pointer because when the server is created, we don't know the device name yet, that is set later. - container *string - docker dockerclient.APIClient -} - -// NewSessioner creates a new instance of Sessioner for the connector mode. -// The container is a pointer to a string because when the server is created, we don't know the device name yet, that -// is set later. 
-func NewSessioner(container *string, docker dockerclient.APIClient) *Sessioner { - return &Sessioner{ - container: container, - docker: docker, - } -} - -// Shell handles the server's SSH shell session when server is running in connector mode. -func (s *Sessioner) Shell(session gliderssh.Session) error { - sspty, _, _ := session.Pty() - - // NOTICE(r): To identify what the container the connector should connect to, we use the `deviceName` as the container name - container := *s.container - - user, ok := session.Context().Value("user").(*osauth.User) - if !ok { - return ErrUserNotFound - } - - resp, id, err := attachShellToContainer(session.Context(), s.docker, container, user, [2]uint{uint(sspty.Window.Height), uint(sspty.Window.Width)}) - if err != nil { - return err - } - defer resp.Close() - - var wg sync.WaitGroup - wg.Add(1) - go func() { - defer wg.Done() - defer func() { - code, err := exitCodeExecFromContainer(s.docker, id) - if err != nil { - fmt.Println(err) - } - - session.Exit(code) //nolint:errcheck - }() - - if _, err := io.Copy(session, resp.Conn); err != nil && err != io.EOF { - fmt.Println(err) - } - }() - - wg.Add(1) - go func() { - defer wg.Done() - defer resp.Close() - - if _, err := io.Copy(resp.Conn, session); err != nil && err != io.EOF { - fmt.Println(err) - } - }() - - wg.Wait() - - return nil -} - -// Exec handles the SSH's server exec session when server is running in connector mode. 
-func (s *Sessioner) Exec(session gliderssh.Session) error { - sspty, _, isPty := session.Pty() - - // NOTICE(r): To identify what the container the connector should connect to, we use the `deviceName` as the container name - container := *s.container - - user, ok := session.Context().Value("user").(*osauth.User) - if !ok { - return ErrUserNotFound - } - - resp, id, err := attachExecToContainer(session.Context(), s.docker, container, user, isPty, session.Command(), [2]uint{uint(sspty.Window.Height), uint(sspty.Window.Width)}) - if err != nil { - return err - } - defer resp.Close() - - var wg sync.WaitGroup - wg.Add(1) - go func() { - defer wg.Done() - defer func() { - code, err := exitCodeExecFromContainer(s.docker, id) - if err != nil { - fmt.Println(err) - } - - session.Exit(code) //nolint:errcheck - }() - - // NOTICE: According to the [Docker] documentation, we can "demultiplex" a command sent to container, but only - // when the exec started doesn't allocate a TTY. As a result, we check if the exec's is requesting it and do - // what was recommended by [Docker]'s to get the stdout and stderr separately. - // - // [Docker]: https://pkg.go.dev/github.com/docker/docker/client#Client.ContainerAttach - if isPty { - if _, err := io.Copy(session, resp.Reader); err != nil && err != io.EOF { - fmt.Println(err) - } - } else { - if _, err := stdcopy.StdCopy(session, session.Stderr(), resp.Reader); err != nil && err != io.EOF { - fmt.Println(err) - } - } - }() - - wg.Add(1) - go func() { - defer wg.Done() - defer resp.CloseWrite() //nolint:errcheck - - if _, err := io.Copy(resp.Conn, session); err != nil && err != io.EOF { - fmt.Println(err) - } - }() - - wg.Wait() - - return nil -} - -// Heredoc handles the server's SSH heredoc session when server is running in connector mode. -// -// heredoc is special block of code that contains multi-line strings that will be redirected to a stdin of a shell. It -// request a shell, but doesn't allocate a pty. 
-func (s *Sessioner) Heredoc(session gliderssh.Session) error { - sspty, _, _ := session.Pty() - - // NOTICE(r): To identify what the container the connector should connect to, we use the `deviceName` as the container name - container := *s.container - - user, ok := session.Context().Value("user").(*osauth.User) - if !ok { - return ErrUserNotFound - } - - resp, id, err := attachHereDocToContainer(session.Context(), s.docker, container, user, [2]uint{uint(sspty.Window.Height), uint(sspty.Window.Width)}) - if err != nil { - return err - } - defer resp.Close() - - var wg sync.WaitGroup - wg.Add(1) - go func() { - defer wg.Done() - defer func() { - code, err := exitCodeExecFromContainer(s.docker, id) - if err != nil { - fmt.Println(err) - } - - session.Exit(code) //nolint:errcheck - }() - - if _, err := stdcopy.StdCopy(session, session.Stderr(), resp.Reader); err != nil && err != io.EOF { - fmt.Println(err) - } - }() - - wg.Add(1) - go func() { - defer wg.Done() - defer resp.CloseWrite() //nolint:errcheck - - if _, err := io.Copy(resp.Conn, session); err != nil && err != io.EOF { - fmt.Println(err) - } - }() - - wg.Wait() - - return nil -} - -// SFTP handles the SSH's server sftp session when server is running in connector mode. -// -// sftp is a subsystem of SSH that allows file operations over SSH. 
-func (s *Sessioner) SFTP(_ gliderssh.Session) error { - return errors.New("SFTP isn't supported to ShellHub Agent in connector mode") -} diff --git a/pkg/agent/server/modes/host/authenticator.go b/pkg/agent/server/modes/host/authenticator.go deleted file mode 100644 index acb74023049..00000000000 --- a/pkg/agent/server/modes/host/authenticator.go +++ /dev/null @@ -1,187 +0,0 @@ -package host - -import ( - "crypto" - "crypto/rsa" - "crypto/sha256" - "encoding/base64" - "encoding/json" - - gliderssh "github.com/gliderlabs/ssh" - "github.com/shellhub-io/shellhub/pkg/agent/pkg/osauth" - "github.com/shellhub-io/shellhub/pkg/agent/server/modes" - "github.com/shellhub-io/shellhub/pkg/api/client" - "github.com/shellhub-io/shellhub/pkg/models" - log "github.com/sirupsen/logrus" - gossh "golang.org/x/crypto/ssh" -) - -// NOTICE: Ensures the Authenticator interface is implemented. -var _ modes.Authenticator = (*Authenticator)(nil) - -// Authenticator implements the Authenticator interface when the server is running in host mode. -type Authenticator struct { - // api is a client to communicate with the ShellHub's API. - api client.Client - // authData is the authentication data received from the API to authenticate the device. - authData *models.DeviceAuthResponse - // singleUserPassword is the password of the single user. - // When it is empty, it means that the single user is disabled. - singleUserPassword string - // deviceName is the device name. - // - // NOTICE: Uses a pointer for later assignment. - deviceName *string - // osauth is an instance of the OSAuth interface to authenticate the user on the Operating System. - osauth osauth.OSAuther -} - -// NewAuthenticator creates a new instance of Authenticator for the host mode. 
-// It receives the api client to perform requests to the ShellHub's API, the authentication data received by the agent -// when started the communication between it and the agent, the singleUserPassword, what indicates is is running at -// this mode and the deviceName. -// -// The deviceName is a pointer to a string because when the server is created, we don't know the device name yet, that -// is set later. -func NewAuthenticator(api client.Client, authData *models.DeviceAuthResponse, singleUserPassword string, deviceName *string) *Authenticator { - return &Authenticator{ - api: api, - authData: authData, - singleUserPassword: singleUserPassword, - deviceName: deviceName, - osauth: new(osauth.OSAuth), - } -} - -// Password handles the server's SSH password authentication when server is running in host mode. -func (a *Authenticator) Password(ctx gliderssh.Context, _ string, pass string) bool { - log := log.WithFields(log.Fields{ - "user": ctx.User(), - }) - var ok bool - - if a.singleUserPassword == "" { - ok = a.osauth.AuthUser(ctx.User(), pass) - } else { - ok = a.osauth.VerifyPasswordHash(a.singleUserPassword, pass) - } - - if ok { - log.Info("Using password authentication") - } else { - log.Info("Failed to authenticate using password") - } - - return ok -} - -// PublicKey handles the server's SSH public key authentication when server is running in host mode. 
-func (a *Authenticator) PublicKey(ctx gliderssh.Context, _ string, key gliderssh.PublicKey) bool { - if a.osauth.LookupUser(ctx.User()) == nil { - return false - } - - type Signature struct { - Username string - Namespace string - } - - sig := &Signature{ - Username: ctx.User(), - Namespace: *a.deviceName, - } - - sigBytes, err := json.Marshal(sig) - if err != nil { - log.WithFields( - log.Fields{ - "container": *a.deviceName, - "username": ctx.User(), - }, - ).WithError(err).Error("failed to marshal signature") - - return false - } - - sigHash := sha256.Sum256(sigBytes) - - fingerprint := gossh.FingerprintLegacyMD5(key) - res, err := a.api.AuthPublicKey(&models.PublicKeyAuthRequest{ - Fingerprint: fingerprint, - Data: string(sigBytes), - }, a.authData.Token) - if err != nil { - log.WithFields( - log.Fields{ - "container": *a.deviceName, - "username": ctx.User(), - "fingerprint": fingerprint, - }, - ).WithError(err).Error("failed to authenticate the user via public key") - - return false - } - - digest, err := base64.StdEncoding.DecodeString(res.Signature) - if err != nil { - log.WithFields( - log.Fields{ - "container": *a.deviceName, - "username": ctx.User(), - "fingerprint": fingerprint, - }, - ).WithError(err).Error("failed to decode the signature") - - return false - } - - cryptoKey, ok := key.(gossh.CryptoPublicKey) - if !ok { - log.WithFields( - log.Fields{ - "container": *a.deviceName, - "username": ctx.User(), - "fingerprint": fingerprint, - }, - ).Error("failed to get the crypto public key") - - return false - } - - pubCrypto := cryptoKey.CryptoPublicKey() - - pubKey, ok := pubCrypto.(*rsa.PublicKey) - if !ok { - log.WithFields( - log.Fields{ - "container": *a.deviceName, - "username": ctx.User(), - "fingerprint": fingerprint, - }, - ).Error("failed to convert the crypto public key") - - return false - } - - if err = rsa.VerifyPKCS1v15(pubKey, crypto.SHA256, sigHash[:], digest); err != nil { - log.WithFields( - log.Fields{ - "container": *a.deviceName, - 
"username": ctx.User(), - "fingerprint": fingerprint, - }, - ).WithError(err).Error("failed to verify the signature") - - return false - } - - log.WithFields( - log.Fields{ - "container": *a.deviceName, - "username": ctx.User(), - "fingerprint": fingerprint, - }, - ).Info("using public key authentication") - - return true -} diff --git a/pkg/agent/server/modes/host/authenticator_test.go b/pkg/agent/server/modes/host/authenticator_test.go deleted file mode 100644 index 37568506067..00000000000 --- a/pkg/agent/server/modes/host/authenticator_test.go +++ /dev/null @@ -1,297 +0,0 @@ -package host - -import ( - "crypto" - "crypto/rand" - "crypto/rsa" - "crypto/sha256" - "encoding/base64" - "encoding/json" - "errors" - "testing" - - gliderssh "github.com/gliderlabs/ssh" - "github.com/go-playground/assert/v2" - "github.com/shellhub-io/shellhub/pkg/agent/pkg/osauth" - osauthMocks "github.com/shellhub-io/shellhub/pkg/agent/pkg/osauth/mocks" - clientMocks "github.com/shellhub-io/shellhub/pkg/api/client/mocks" - "github.com/shellhub-io/shellhub/pkg/models" - "github.com/stretchr/testify/mock" - gossh "golang.org/x/crypto/ssh" -) - -func TestPublicKey(t *testing.T) { - // stringToRef is a helper function to convert a string to a pointer to a string. 
- stringToRef := func(s string) *string { return &s } - - privKey, _ := rsa.GenerateKey(rand.Reader, 2048) - key, _ := gossh.NewPublicKey(&privKey.PublicKey) - - tests := []struct { - ctx gliderssh.Context - authenticator *Authenticator - name string - user string - key gliderssh.PublicKey - requiredMocs func(osauthMock *osauthMocks.OSAuther, apiMock *clientMocks.Client) - expected bool - }{ - { - ctx: &testSSHContext{ - user: "", - }, - authenticator: &Authenticator{ - deviceName: stringToRef("device"), - api: new(clientMocks.Client), - osauth: new(osauthMocks.OSAuther), - }, - name: "return false when user is not found", - user: "", - key: nil, - requiredMocs: func(osauthMock *osauthMocks.OSAuther, apiMock *clientMocks.Client) { - osauthMock.On("LookupUser", "").Return(nil).Once() - }, - expected: false, - }, - { - ctx: &testSSHContext{ - user: "test", - }, - authenticator: &Authenticator{ - authData: &models.DeviceAuthResponse{ - Token: "token", - }, - singleUserPassword: "", - deviceName: stringToRef("device"), - api: new(clientMocks.Client), - osauth: new(osauthMocks.OSAuther), - }, - name: "return false when public key api request fails", - user: "", - key: key, - requiredMocs: func(osauthMock *osauthMocks.OSAuther, apiMock *clientMocks.Client) { - osauthMock.On("LookupUser", "test").Return(&osauth.User{}).Once() - apiMock.On("AuthPublicKey", mock.Anything, "token").Return(nil, errors.New("error")).Once() - }, - expected: false, - }, - { - ctx: &testSSHContext{ - user: "test", - }, - authenticator: &Authenticator{ - authData: &models.DeviceAuthResponse{ - Token: "token", - }, - singleUserPassword: "", - deviceName: stringToRef("device"), - api: new(clientMocks.Client), - osauth: new(osauthMocks.OSAuther), - }, - name: "return false when public key signature is invalid", - user: "", - key: key, - requiredMocs: func(osauthMock *osauthMocks.OSAuther, apiMock *clientMocks.Client) { - osauthMock.On("LookupUser", "test").Return(&osauth.User{}).Once() - 
apiMock.On("AuthPublicKey", mock.Anything, "token").Return(&models.PublicKeyAuthResponse{ - Signature: "signature", - }, nil).Once() - }, - expected: false, - }, - { - ctx: &testSSHContext{ - user: "test", - }, - authenticator: &Authenticator{ - authData: &models.DeviceAuthResponse{ - Token: "token", - }, - singleUserPassword: "", - deviceName: stringToRef("device"), - api: new(clientMocks.Client), - osauth: new(osauthMocks.OSAuther), - }, - name: "return true when public key signature does not implement crypto.PublicKey", - user: "", - key: key, - requiredMocs: func(osauthMock *osauthMocks.OSAuther, apiMock *clientMocks.Client) { - osauthMock.On("LookupUser", "test").Return(&osauth.User{}).Once() - apiMock.On("AuthPublicKey", mock.Anything, "token").Return(&models.PublicKeyAuthResponse{ - Signature: base64.StdEncoding.EncodeToString([]byte("signature")), - }, nil).Once() - }, - expected: false, - }, - { - ctx: &testSSHContext{ - user: "test", - }, - authenticator: &Authenticator{ - authData: &models.DeviceAuthResponse{ - Token: "token", - }, - singleUserPassword: "", - deviceName: stringToRef("device"), - api: new(clientMocks.Client), - osauth: new(osauthMocks.OSAuther), - }, - name: "fail when public key returned by crypto.PublicKey is not a pointer to a rsa.PublicKey", - user: "", - key: key, - requiredMocs: func(osauthMock *osauthMocks.OSAuther, apiMock *clientMocks.Client) { - osauthMock.On("LookupUser", "test").Return(&osauth.User{}).Once() - apiMock.On("AuthPublicKey", mock.Anything, "token").Return(&models.PublicKeyAuthResponse{ - Signature: base64.StdEncoding.EncodeToString([]byte("signature")), - }, nil).Once() - }, - expected: false, - }, - { - ctx: &testSSHContext{ - user: "test", - }, - authenticator: &Authenticator{ - authData: &models.DeviceAuthResponse{ - Token: "token", - }, - singleUserPassword: "", - deviceName: stringToRef("device"), - api: new(clientMocks.Client), - osauth: new(osauthMocks.OSAuther), - }, - name: "return false when public key 
returned by crypto.PublicKey does not pass on rsa.VerifyPKCS1v15", - user: "", - key: key, - requiredMocs: func(osauthMock *osauthMocks.OSAuther, apiMock *clientMocks.Client) { - osauthMock.On("LookupUser", "test").Return(&osauth.User{}).Once() - apiMock.On("AuthPublicKey", mock.Anything, "token").Return(&models.PublicKeyAuthResponse{ - Signature: base64.StdEncoding.EncodeToString([]byte("signature")), - }, nil).Once() - }, - expected: false, - }, - { - ctx: &testSSHContext{ - user: "test", - }, - authenticator: &Authenticator{ - authData: &models.DeviceAuthResponse{ - Token: "token", - }, - singleUserPassword: "", - deviceName: stringToRef("device"), - api: new(clientMocks.Client), - osauth: new(osauthMocks.OSAuther), - }, - name: "return true when public key signature is valid", - user: "", - key: key, - requiredMocs: func(osauthMock *osauthMocks.OSAuther, apiMock *clientMocks.Client) { - type Signature struct { - Username string - Namespace string - } - - sigBytes, _ := json.Marshal(&Signature{ - Username: "test", - Namespace: "device", - }) - - digest := sha256.Sum256(sigBytes) - - signature, _ := rsa.SignPKCS1v15(rand.Reader, privKey, crypto.SHA256, digest[:]) - - osauthMock.On("LookupUser", "test").Return(&osauth.User{}).Once() - apiMock.On("AuthPublicKey", &models.PublicKeyAuthRequest{ - Fingerprint: gossh.FingerprintLegacyMD5(key), - Data: string(sigBytes), - }, "token").Return(&models.PublicKeyAuthResponse{ - Signature: base64.StdEncoding.EncodeToString(signature), - }, nil).Once() - }, - expected: true, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - tt.requiredMocs(tt.authenticator.osauth.(*osauthMocks.OSAuther), tt.authenticator.api.(*clientMocks.Client)) - - ok := tt.authenticator.PublicKey(tt.ctx, tt.user, tt.key) - assert.Equal(t, tt.expected, ok) - }) - } -} - -func TestPassword(t *testing.T) { - tests := []struct { - ctx gliderssh.Context - authenticator *Authenticator - name string - user string - password string - 
requiredMocs func(osauth *osauthMocks.OSAuther) - expected bool - }{ - { - ctx: &testSSHContext{user: "test"}, - authenticator: &Authenticator{osauth: new(osauthMocks.OSAuther)}, - name: "return false when user or password are invalid", - user: "", - password: "password", - requiredMocs: func(osauth *osauthMocks.OSAuther) { - osauth.On("AuthUser", "test", "password").Return(false).Once() - }, - expected: false, - }, - { - ctx: &testSSHContext{user: "test"}, - authenticator: &Authenticator{osauth: new(osauthMocks.OSAuther)}, - name: "return true when user and password are valid", - user: "", - password: "password", - requiredMocs: func(osauth *osauthMocks.OSAuther) { - osauth.On("AuthUser", "test", "password").Return(true).Once() - }, - expected: true, - }, - { - ctx: &testSSHContext{user: "test"}, - authenticator: &Authenticator{ - osauth: new(osauthMocks.OSAuther), - singleUserPassword: "test", - }, - name: "return false when single user is enabled and password is invalid", - user: "", - password: "password", - requiredMocs: func(osauth *osauthMocks.OSAuther) { - osauth.On("VerifyPasswordHash", "test", "password").Return(false).Once() - }, - expected: false, - }, - { - ctx: &testSSHContext{user: "test"}, - authenticator: &Authenticator{ - osauth: new(osauthMocks.OSAuther), - singleUserPassword: "test", - }, - name: "return true when single user is enabled and password is valid", - user: "", - password: "password", - requiredMocs: func(osauth *osauthMocks.OSAuther) { - osauth.On("VerifyPasswordHash", "test", "password").Return(true).Once() - }, - expected: true, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - tt.requiredMocs(tt.authenticator.osauth.(*osauthMocks.OSAuther)) - - got := tt.authenticator.Password(tt.ctx, tt.user, tt.password) - assert.Equal(t, tt.expected, got) - }) - } -} diff --git a/pkg/agent/server/modes/host/command/command_docker.go b/pkg/agent/server/modes/host/command/command_docker.go deleted file mode 100644 
index 268eb1398e7..00000000000 --- a/pkg/agent/server/modes/host/command/command_docker.go +++ /dev/null @@ -1,83 +0,0 @@ -//go:build docker -// +build docker - -package command - -import ( - "fmt" - "os" - "os/exec" - "strconv" - - "github.com/shellhub-io/shellhub/pkg/agent/pkg/osauth" -) - -func NewCmd(u *osauth.User, shell, term, host string, command ...string) *exec.Cmd { - nscommand, _ := nsenterCommandWrapper(u.UID, u.GID, u.HomeDir, command...) - - cmd := exec.Command(nscommand[0], nscommand[1:]...) //nolint:gosec - cmd.Env = []string{ - "TERM=" + term, - "HOME=" + u.HomeDir, - "SHELL=" + shell, - "USER=" + u.Username, - "LOGNAME=" + u.Username, - "SHELLHUB_HOST=" + host, - } - - return cmd -} - -func getWrappedCommand(nsArgs []string, uid, gid uint32, home string) []string { - setPrivCmd := []string{ - "/usr/bin/setpriv", - "--init-groups", - "--ruid", - strconv.Itoa(int(uid)), - "--regid", - strconv.Itoa(int(gid)), - } - - nsenterCmd := append([]string{ - "/usr/bin/nsenter", - "-t", - "1", - }, nsArgs...) - - nsenterCmd = append(nsenterCmd, - []string{ - "-S", - strconv.Itoa(int(uid)), - fmt.Sprintf("--wdns=%s", home), - }..., - ) - - return append(setPrivCmd, nsenterCmd...) 
-} - -func nsenterCommandWrapper(uid, gid uint32, home string, command ...string) ([]string, error) { - if _, err := os.Stat("/usr/bin/nsenter"); err != nil && !os.IsNotExist(err) { - return nil, err - } - - paths := map[string]string{ - "mnt": "-m", - "uts": "-u", - "ipc": "-i", - "net": "-n", - "pid": "-p", - "cgroup": "-C", - "time": "-T", - } - - args := []string{} - for path, params := range paths { - if _, err := os.Stat(fmt.Sprintf("/proc/1/ns/%s", path)); err != nil { - continue - } - - args = append(args, params) - } - - return append(getWrappedCommand(args, uid, gid, home), command...), nil -} diff --git a/pkg/agent/server/modes/host/command/command_native.go b/pkg/agent/server/modes/host/command/command_native.go deleted file mode 100644 index 9a28829c2de..00000000000 --- a/pkg/agent/server/modes/host/command/command_native.go +++ /dev/null @@ -1,53 +0,0 @@ -//go:build !docker -// +build !docker - -package command - -import ( - "os" - "os/exec" - "os/user" - "strconv" - "syscall" - - "github.com/shellhub-io/shellhub/pkg/agent/pkg/osauth" - log "github.com/sirupsen/logrus" -) - -func NewCmd(u *osauth.User, shell, term, host string, command ...string) *exec.Cmd { - user, _ := user.Lookup(u.Username) - userGroups, _ := user.GroupIds() - - // Supplementary groups for the user - groups := make([]uint32, 0) - for _, sgid := range userGroups { - igid, _ := strconv.Atoi(sgid) - groups = append(groups, uint32(igid)) - } - if len(groups) == 0 { - groups = append(groups, u.GID) - } - - cmd := exec.Command(command[0], command[1:]...) 
//nolint:gosec - cmd.Env = []string{ - "TERM=" + term, - "HOME=" + u.HomeDir, - "SHELL=" + shell, - "SHELLHUB_HOST=" + host, - } - - if _, err := os.Stat(u.HomeDir); err != nil { - log.WithError(err).WithField("dir", u.HomeDir).Warn("setting user's home directory to /") - - cmd.Dir = "/" - } else { - cmd.Dir = u.HomeDir - } - - if os.Geteuid() == 0 { - cmd.SysProcAttr = &syscall.SysProcAttr{} - cmd.SysProcAttr.Credential = &syscall.Credential{Uid: u.UID, Gid: u.GID, Groups: groups} - } - - return cmd -} diff --git a/pkg/agent/server/modes/host/sessioner.go b/pkg/agent/server/modes/host/sessioner.go deleted file mode 100644 index abc8bda87ec..00000000000 --- a/pkg/agent/server/modes/host/sessioner.go +++ /dev/null @@ -1,435 +0,0 @@ -package host - -import ( - "errors" - "fmt" - "io" - "os" - "os/exec" - "os/user" - "strings" - "sync" - - gliderssh "github.com/gliderlabs/ssh" - "github.com/shellhub-io/shellhub/pkg/agent/pkg/osauth" - "github.com/shellhub-io/shellhub/pkg/agent/server/modes" - "github.com/shellhub-io/shellhub/pkg/agent/server/modes/host/command" - "github.com/shellhub-io/shellhub/pkg/agent/server/utmp" - log "github.com/sirupsen/logrus" - gossh "golang.org/x/crypto/ssh" -) - -func newShellCmd(deviceName string, username string, term string) *exec.Cmd { - shell := os.Getenv("SHELL") - - user := new(osauth.OSAuth).LookupUser(username) - - if shell == "" { - shell = user.Shell - } - - if term == "" { - term = "xterm" - } - - cmd := command.NewCmd(user, shell, term, deviceName, shell, "--login") - - return cmd -} - -// NOTICE: Ensures the Sessioner interface is implemented. -var _ modes.Sessioner = (*Sessioner)(nil) - -// Sessioner implements the Sessioner interface when the server is running in host mode. -type Sessioner struct { - mu sync.Mutex - cmds map[string]*exec.Cmd - // deviceName is the device name. - // - // NOTICE: It's a pointer because when the server is created, we don't know the device name yet, that is set later. 
- deviceName *string -} - -func (s *Sessioner) SetCmds(cmds map[string]*exec.Cmd) { - s.cmds = cmds -} - -// NewSessioner creates a new instance of Sessioner for the host mode. -// The device name is a pointer to a string because when the server is created, we don't know the device name yet, that -// is set later. -func NewSessioner(deviceName *string, cmds map[string]*exec.Cmd) *Sessioner { - return &Sessioner{ - deviceName: deviceName, - cmds: cmds, - } -} - -// Shell manages the SSH shell session of the server when operating in host mode. -func (s *Sessioner) Shell(session gliderssh.Session) error { - sspty, winCh, isPty := session.Pty() - - scmd := newShellCmd(*s.deviceName, session.User(), sspty.Term) - - pts, err := startPty(scmd, session, winCh) - if err != nil { - log.Warn(err) - } - - u := new(osauth.OSAuth).LookupUser(session.User()) - - err = os.Chown(pts.Name(), int(u.UID), -1) - if err != nil { - log.Warn(err) - } - - remoteAddr := session.RemoteAddr() - - log.WithFields(log.Fields{ - "user": session.User(), - "pty": pts.Name(), - "ispty": isPty, - "remoteaddr": remoteAddr, - "localaddr": session.LocalAddr(), - }).Info("Session started") - - ut := utmp.UtmpStartSession( - pts.Name(), - session.User(), - remoteAddr.String(), - ) - - s.mu.Lock() - s.cmds[session.Context().Value(gliderssh.ContextKeySessionID).(string)] = scmd - s.mu.Unlock() - - if err := scmd.Wait(); err != nil { - log.Warn(err) - } - - log.WithFields(log.Fields{ - "user": session.User(), - "pty": pts.Name(), - "remoteaddr": remoteAddr, - "localaddr": session.LocalAddr(), - }).Info("Session ended") - - utmp.UtmpEndSession(ut) - - return nil -} - -// Heredoc handles the server's SSH heredoc session when server is running in host mode. -// -// heredoc is special block of code that contains multi-line strings that will be redirected to a stdin of a shell. It -// request a shell, but doesn't allocate a pty. 
-func (s *Sessioner) Heredoc(session gliderssh.Session) error { - _, _, isPty := session.Pty() - - cmd := newShellCmd(*s.deviceName, session.User(), "") - - stdout, _ := cmd.StdoutPipe() - stdin, _ := cmd.StdinPipe() - stderr, _ := cmd.StderrPipe() - - serverConn, ok := session.Context().Value(gliderssh.ContextKeyConn).(*gossh.ServerConn) - if !ok { - return fmt.Errorf("failed to get server connection from session context") - } - - go func() { - serverConn.Wait() // nolint:errcheck - cmd.Process.Kill() // nolint:errcheck - }() - - log.WithFields(log.Fields{ - "user": session.User(), - "ispty": isPty, - "remoteaddr": session.RemoteAddr(), - "localaddr": session.LocalAddr(), - "Raw command": session.RawCommand(), - }).Info("Command started") - - err := cmd.Start() - if err != nil { - log.Warn(err) - } - - go func() { - if _, err := io.Copy(stdin, session); err != nil { - fmt.Println(err) //nolint:forbidigo - } - - stdin.Close() - }() - - go func() { - combinedOutput := io.MultiReader(stdout, stderr) - if _, err := io.Copy(session, combinedOutput); err != nil { - fmt.Println(err) //nolint:forbidigo - } - }() - - err = cmd.Wait() - if err != nil { - log.Warn(err) - } - - session.Exit(cmd.ProcessState.ExitCode()) //nolint:errcheck - - log.WithFields(log.Fields{ - "user": session.User(), - "remoteaddr": session.RemoteAddr(), - "localaddr": session.LocalAddr(), - "Raw command": session.RawCommand(), - }).Info("Command ended") - - return nil -} - -// Exec handles the SSH's server exec session when server is running in host mode. 
-func (s *Sessioner) Exec(session gliderssh.Session) error { - if len(session.Command()) == 0 { - log.WithFields(log.Fields{ - "user": session.User(), - "localaddr": session.LocalAddr(), - }).Error("None command was received") - - log.Info("Session ended") - _ = session.Exit(1) - - return nil - } - - user := new(osauth.OSAuth).LookupUser(session.User()) - sPty, sWinCh, sIsPty := session.Pty() - - shell := os.Getenv("SHELL") - if shell == "" { - shell = user.Shell - } - - term := sPty.Term - if sIsPty && term == "" { - term = "xterm" - } - - cmd := command.NewCmd(user, shell, term, *s.deviceName, shell, "-c", strings.Join(session.Command(), " ")) - - wg := &sync.WaitGroup{} - if sIsPty { - pty, tty, err := initPty(cmd, session, sWinCh) - if err != nil { - log.Warn(err) - } - - defer tty.Close() - defer pty.Close() - - if err := os.Chown(tty.Name(), int(user.UID), -1); err != nil { - log.Warn(err) - } - } else { - stdout, _ := cmd.StdoutPipe() - stdin, _ := cmd.StdinPipe() - stderr, _ := cmd.StderrPipe() - - // relay input from the SSH session to the command. - go func() { - if _, err := io.Copy(stdin, session); err != nil { - fmt.Println(err) //nolint:forbidigo - } - - stdin.Close() - }() - - wg.Add(1) - - // relay the command's combined output and error streams back to the SSH session. 
- go func() { - defer wg.Done() - combinedOutput := io.MultiReader(stdout, stderr) - if _, err := io.Copy(session, combinedOutput); err != nil { - fmt.Println(err) //nolint:forbidigo - } - }() - } - - log.WithFields(log.Fields{ - "user": session.User(), - "ispty": sIsPty, - "remoteaddr": session.RemoteAddr(), - "localaddr": session.LocalAddr(), - "Raw command": session.RawCommand(), - }).Info("Command started") - - if err := cmd.Start(); err != nil { - return err - } - - if !sIsPty { - wg.Wait() - } - - serverConn, ok := session.Context().Value(gliderssh.ContextKeyConn).(*gossh.ServerConn) - if !ok { - return fmt.Errorf("failed to get server connection from session context") - } - - // kill the process if the SSH connection is interrupted - go func() { - serverConn.Wait() // nolint:errcheck - cmd.Process.Kill() // nolint:errcheck - }() - - if err := cmd.Wait(); err != nil { - log.Warn(err) - } - - log.WithFields(log.Fields{ - "user": session.User(), - "ispty": sIsPty, - "remoteaddr": session.RemoteAddr(), - "localaddr": session.LocalAddr(), - "Raw command": session.RawCommand(), - }).Info("Command ended") - - if err := session.Exit(cmd.ProcessState.ExitCode()); err != nil { // nolint:errcheck - log.Warn(err) - } - - return nil -} - -// SFTP handles the SSH's server sftp session when server is running in host mode. -// -// sftp is a subsystem of SSH that allows file operations over SSH. -func (s *Sessioner) SFTP(session gliderssh.Session) error { - log.WithFields(log.Fields{ - "user": session.Context().User(), - }).Info("SFTP session started") - defer session.Close() - - cmd := exec.Command("/proc/self/exe", []string{"sftp"}...) 
- - looked, err := user.Lookup(session.User()) - if err != nil { - log.WithError(err).WithFields(log.Fields{ - "user": session.Context().User(), - }).Error("Failed to lookup user") - - return errors.New("failed to lookup user") - } - - home := fmt.Sprintf("HOME=%s", looked.HomeDir) - gid := fmt.Sprintf("GID=%s", looked.Gid) - uid := fmt.Sprintf("UID=%s", looked.Uid) - - cmd.Env = append(cmd.Env, home) - cmd.Env = append(cmd.Env, gid) - cmd.Env = append(cmd.Env, uid) - - input, err := cmd.StdinPipe() - if err != nil { - log.WithError(err).WithFields(log.Fields{ - "user": session.Context().User(), - }).Error("Failed to get stdin pipe") - - return errors.New("failed to get stdin pipe") - } - - output, err := cmd.StdoutPipe() - if err != nil { - log.WithError(err).WithFields(log.Fields{ - "user": session.Context().User(), - }).Error("Failed to get stdout pipe") - - return errors.New("failed to get stdout pipe") - } - - erro, err := cmd.StderrPipe() - if err != nil { - log.WithError(err).WithFields(log.Fields{ - "user": session.Context().User(), - }).Error("Failed to get stderr pipe") - - return errors.New("failed to get stderr pipe") - } - - if err := cmd.Start(); err != nil { - log.WithError(err).WithFields(log.Fields{ - "user": session.Context().User(), - }).Error("Failed to start command") - - return errors.New("failed to start command") - } - - go func() { - log.WithFields(log.Fields{ - "user": session.Context().User(), - }).Trace("copying input to session") - - if _, err := io.Copy(input, session); err != nil && err != io.EOF { - log.WithError(err).WithFields(log.Fields{ - "user": session.Context().User(), - }).Error("Failed to copy stdin to command") - - return - } - - log.WithFields(log.Fields{ - "user": session.Context().User(), - }).Trace("closing input to session ends") - - input.Close() - }() - - go func() { - log.WithFields(log.Fields{ - "user": session.Context().User(), - }).Trace("copying output to session") - - if _, err := io.Copy(session, output); err 
!= nil { - log.WithError(err).WithFields(log.Fields{ - "user": session.Context().User(), - }).Error("Failed to copy stdout to session") - - return - } - - log.WithFields(log.Fields{ - "user": session.Context().User(), - }).Trace("closing output to session ends") - }() - - go func() { - log.WithFields(log.Fields{ - "user": session.Context().User(), - }).Trace("copying error to session") - - if _, err := io.Copy(session, erro); err != nil { - log.WithError(err).WithFields(log.Fields{ - "user": session.Context().User(), - }).Error("Failed to copy stderr to session") - - return - } - - log.WithFields(log.Fields{ - "user": session.Context().User(), - }).Trace("closing error to session ends") - }() - - if err = cmd.Wait(); err != nil { - log.WithError(err).WithFields(log.Fields{ - "user": session.Context().User(), - }).Error("Failed to wait command") - - return errors.New("failed to wait command") - } - - log.WithFields(log.Fields{ - "user": session.Context().User(), - }).Info("SFTP session closed") - - return nil -} diff --git a/pkg/agent/server/server.go b/pkg/agent/server/server.go deleted file mode 100644 index f35719c8a82..00000000000 --- a/pkg/agent/server/server.go +++ /dev/null @@ -1,215 +0,0 @@ -package server - -import ( - "net" - "os/exec" - "sync" - "time" - - gliderssh "github.com/gliderlabs/ssh" - "github.com/shellhub-io/shellhub/pkg/agent/server/modes" - "github.com/shellhub-io/shellhub/pkg/agent/server/modes/host" - "github.com/shellhub-io/shellhub/pkg/api/client" - "github.com/shellhub-io/shellhub/pkg/models" - log "github.com/sirupsen/logrus" - gossh "golang.org/x/crypto/ssh" -) - -// List of SSH subsystems names supported by the agent. -const ( - // SFTPSubsystemName is the name of the SFTP subsystem. 
- SFTPSubsystemName = "sftp" -) - -type sshConn struct { - net.Conn - closeCallback func(string) - ctx gliderssh.Context -} - -func (c *sshConn) Close() error { - if id, ok := c.ctx.Value(gliderssh.ContextKeySessionID).(string); ok { - c.closeCallback(id) - } - - return c.Conn.Close() -} - -type Server struct { - sshd *gliderssh.Server - api client.Client - authData *models.DeviceAuthResponse - cmds map[string]*exec.Cmd - deviceName string - containerID string - mu sync.Mutex - keepAliveInterval int - singleUserPassword string - - // mode is the mode of the server, identifing where and how the SSH's server is running. - // - // For example, the [modes.HostMode] means that the SSH's server runs in the host machine, using the host - // `/etc/passwd`, `/etc/shadow`, redirecting the SSH's connection to the device sdin, stdout and stderr and etc. - // - // Check the [modes] package for more information. - mode modes.Mode - Sessions sync.Map -} - -// SSH channels supported by the SSH server. -// -// An SSH channel refers to a communication link established between a client and a server. SSH channels are multiplexed -// over a single encrypted connection, facilitating concurrent and secure communication for various purposes. -// -// SSH_MSG_CHANNEL_OPEN -// -// Check www.ietf.org/rfc/rfc4254.txt for more information. -const ( - // ChannelSession refers to a type of SSH channel that is established between a client and a server for interactive - // shell sessions or command execution. SSH channels are used to multiplex multiple logical communication channels - // over a single SSH connection. - // - // Check www.ietf.org/rfc/rfc4254.txt at section 6.1 for more information. - ChannelSession string = "session" - // ChannelDirectTcpip is the channel type in SSH is used to establish a direct TCP/IP connection between the SSH - // client and a target host through the SSH server. 
This channel type allows the client to initiate a connection to - // a specific destination host and port, and the SSH server acts as a bridge to facilitate this connection. - // - // Check www.ietf.org/rfc/rfc4254.txt at section 7.2 for more information. - ChannelDirectTcpip string = "direct-tcpip" -) - -// NewServer creates a new server SSH agent server. -func NewServer(api client.Client, authData *models.DeviceAuthResponse, privateKey string, keepAliveInterval int, singleUserPassword string, mode modes.Mode) *Server { - server := &Server{ - api: api, - authData: authData, - cmds: make(map[string]*exec.Cmd), - keepAliveInterval: keepAliveInterval, - singleUserPassword: singleUserPassword, - mode: mode, - Sessions: sync.Map{}, - } - - if m, ok := mode.(*host.Mode); ok { - m.Sessioner.SetCmds(server.cmds) - } - - server.sshd = &gliderssh.Server{ - PasswordHandler: server.passwordHandler, - PublicKeyHandler: server.publicKeyHandler, - Handler: server.sessionHandler, - SessionRequestCallback: server.sessionRequestCallback, - SubsystemHandlers: map[string]gliderssh.SubsystemHandler{ - SFTPSubsystemName: server.sftpSubsystemHandler, - }, - ConnCallback: func(ctx gliderssh.Context, conn net.Conn) net.Conn { - closeCallback := func(id string) { - server.mu.Lock() - defer server.mu.Unlock() - - if v, ok := server.cmds[id]; ok { - v.Process.Kill() // nolint:errcheck - delete(server.cmds, id) - } - } - - return &sshConn{conn, closeCallback, ctx} - }, - LocalPortForwardingCallback: func(ctx gliderssh.Context, destinationHost string, destinationPort uint32) bool { - return true - }, - ReversePortForwardingCallback: func(ctx gliderssh.Context, destinationHost string, destinationPort uint32) bool { - return false - }, - ChannelHandlers: map[string]gliderssh.ChannelHandler{ - ChannelSession: gliderssh.DefaultSessionHandler, - ChannelDirectTcpip: gliderssh.DirectTCPIPHandler, - }, - } - - err := server.sshd.SetOption(gliderssh.HostKeyFile(privateKey)) - if err != nil { - 
log.Warn(err) - } - - return server -} - -// startKeepAlive sends a keep alive message to the server every in keepAliveInterval seconds. -func (s *Server) startKeepAliveLoop(session gliderssh.Session) { - interval := time.Duration(s.keepAliveInterval) * time.Second - - ticker := time.NewTicker(interval) - defer ticker.Stop() - - log.WithFields(log.Fields{ - "interval": interval, - }).Debug("Starting keep alive loop") - -loop: - for { - select { - case <-ticker.C: - if conn, ok := session.Context().Value(gliderssh.ContextKeyConn).(gossh.Conn); ok { - if _, _, err := conn.SendRequest("keepalive", false, nil); err != nil { - log.Error(err) - } - } - case <-session.Context().Done(): - log.Debug("Stopping keep alive loop after session closed") - ticker.Stop() - - break loop - } - } -} - -// List of request types that are supported by SSH. -// -// Once the session has been set up, a program is started at the remote end. The program can be a shell, an application -// program, or a subsystem with a host-independent name. Only one of these requests can succeed per channel. -// -// Check www.ietf.org/rfc/rfc4254.txt at section 6.5 for more information. -const ( - // RequestTypeShell is the request type for shell. - RequestTypeShell = "shell" - // RequestTypeExec is the request type for exec. - RequestTypeExec = "exec" - // RequestTypeSubsystem is the request type for any subsystem. - RequestTypeSubsystem = "subsystem" - // RequestTypeUnknown is the request type for unknown. - // - // It is not a valid request type documentated by SSH's RFC, but it can be useful to identify the request type when - // it is not known. 
- RequestTypeUnknown = "unknown" -) - -func (s *Server) sessionRequestCallback(session gliderssh.Session, requestType string) bool { - session.Context().SetValue("request_type", requestType) - - return true -} - -func (s *Server) HandleConn(conn net.Conn) { - s.sshd.HandleConn(conn) -} - -func (s *Server) SetDeviceName(name string) { - s.deviceName = name -} - -func (s *Server) SetContainerID(id string) { - s.containerID = id -} - -func (s *Server) CloseSession(id string) { - if session, ok := s.Sessions.Load(id); ok { - session.(net.Conn).Close() - s.Sessions.Delete(id) - } -} - -func (s *Server) ListenAndServe() error { - return s.sshd.ListenAndServe() -} diff --git a/pkg/agent/server/session.go b/pkg/agent/server/session.go deleted file mode 100644 index fd6a2c9cf37..00000000000 --- a/pkg/agent/server/session.go +++ /dev/null @@ -1,75 +0,0 @@ -package server - -import ( - "fmt" - - gliderssh "github.com/gliderlabs/ssh" - log "github.com/sirupsen/logrus" -) - -// Type is the type of SSH session. -type Type string - -const ( - // SessionTypeShell is the session's type returned when the SSH client requests a shell. - SessionTypeShell Type = "shell" - // SessionTypeHeredoc is the session's type returned when the SSH client requests a command execution with a heredoc. - // "heredoc" is a format that does not require a TTY, but attaches the client input to the command's stdin. - // It is used to execute a sequence of commands in a single SSH connection without the need to open a shell. - SessionTypeHeredoc Type = "heredoc" - // SessionTypeExec is the session's type returned when the SSH client requests a command execution. - SessionTypeExec Type = "exec" - // SessionTypeSubsystem is the session's type returned when the SSH client requests a subsystem. - SessionTypeSubsystem Type = "subsystem" - // SessionTypeUnknown is the session's type returned when the SSH client requests an unknown session type. 
- SessionTypeUnknown Type = "unknown" -) - -// GetSessionType returns the session's type based on the SSH client session. -func GetSessionType(session gliderssh.Session) (Type, error) { - _, _, isPty := session.Pty() - - requestType, ok := session.Context().Value("request_type").(string) - if !ok { - return SessionTypeUnknown, fmt.Errorf("failed to get request type from session context") - } - - switch { - case isPty && requestType == RequestTypeShell: - return SessionTypeShell, nil - case !isPty && requestType == RequestTypeShell: - return SessionTypeHeredoc, nil - case requestType == RequestTypeExec: - return SessionTypeExec, nil - case requestType == RequestTypeSubsystem: - return SessionTypeSubsystem, nil - default: - return SessionTypeUnknown, nil - } -} - -func (s *Server) sessionHandler(session gliderssh.Session) { - log.Info("New session request") - - go s.startKeepAliveLoop(session) - - sessionType, err := GetSessionType(session) - if err != nil { - log.Error(err) - - return - } - - log.WithField("type", sessionType).Info("Request type got") - - switch sessionType { - case SessionTypeShell: - s.mode.Shell(session) //nolint:errcheck - case SessionTypeHeredoc: - s.mode.Heredoc(session) //nolint:errcheck - default: - s.mode.Exec(session) //nolint:errcheck - } - - log.Info("Session ended") -} diff --git a/pkg/agent/sftp.go b/pkg/agent/sftp.go deleted file mode 100644 index 60a060e8f38..00000000000 --- a/pkg/agent/sftp.go +++ /dev/null @@ -1,102 +0,0 @@ -package agent - -import ( - "errors" - "fmt" - "io" - "os" - "strconv" - "syscall" - - "github.com/pkg/sftp" -) - -type pipe struct { - in *os.File - out *os.File - err *os.File -} - -func (p *pipe) Read(data []byte) (int, error) { - return p.in.Read(data) -} - -func (p *pipe) Write(data []byte) (int, error) { - return p.out.Write(data) -} - -func (p *pipe) Close() error { - os.Exit(0) - - return nil -} - -// NewSFTPServer creates a new SFTP server when a new session is created between the agent and the server. 
-func NewSFTPServer() { - piped := &pipe{os.Stdin, os.Stdout, os.Stderr} - - if err := syscall.Chroot("/host"); err != nil { - fmt.Fprintln(os.Stderr, err) - } - - home, ok := os.LookupEnv("HOME") - if !ok { - fmt.Fprintln(os.Stderr, errors.New("HOME environment variable not set")) - - return - } - - toInt := func(s string, ok bool) (int, error) { - i, err := strconv.Atoi(s) - if err != nil { - return 0, err - } - - return i, nil - } - - gid, err := toInt(os.LookupEnv("GID")) - if err != nil { - fmt.Fprintln(os.Stderr, errors.New("GID environment variable not set")) - - return - } - - uid, err := toInt(os.LookupEnv("UID")) - if err != nil { - fmt.Fprintln(os.Stderr, errors.New("UID environment variable not set")) - - return - } - - if err := syscall.Chdir(home); err != nil { - fmt.Fprintln(os.Stderr, err) - - return - } - - if err := syscall.Setgid(gid); err != nil { - fmt.Fprintln(os.Stderr, err) - - return - } - - if err := syscall.Setuid(uid); err != nil { - fmt.Fprintln(os.Stderr, err) - - return - } - - server, err := sftp.NewServer(piped, []sftp.ServerOption{}...) - if err != nil { - fmt.Fprintln(os.Stderr, err) - - return - } - - if err := server.Serve(); err != io.EOF { - fmt.Fprintln(os.Stderr, err) - } - - server.Close() -} diff --git a/pkg/api/authorizer/claims.go b/pkg/api/authorizer/claims.go new file mode 100644 index 00000000000..0aade227695 --- /dev/null +++ b/pkg/api/authorizer/claims.go @@ -0,0 +1,22 @@ +package authorizer + +// UserClaims represents the attributes needed to authenticate a user. +type UserClaims struct { + ID string `json:"id"` + Origin string `json:"origin"` + // TenantID is the identifier of the tenant to which the claims belongs. + // It's optional. + TenantID string `json:"tenant"` + Role Role `json:"-"` + Username string `json:"name"` + // MFA indicates whether multi-factor authentication is enabled for the user. + MFA bool `json:"mfa"` + // Admin indicates whether the user has administrative privileges. 
+ Admin bool `json:"admin"` +} + +// DeviceClaims represents the attributes needed to authenticate a device. +type DeviceClaims struct { + UID string `json:"uid"` + TenantID string `json:"tenant"` +} diff --git a/pkg/api/authorizer/permissions.go b/pkg/api/authorizer/permissions.go new file mode 100644 index 00000000000..a3b4899dbc4 --- /dev/null +++ b/pkg/api/authorizer/permissions.go @@ -0,0 +1,178 @@ +package authorizer + +type Permission int + +const ( + DeviceAccept Permission = iota + DeviceReject + DeviceUpdate + DeviceRemove + DeviceConnect + DeviceRename + DeviceDetails + + TagCreate + TagUpdate + TagDelete + + SessionPlay + SessionClose + SessionRemove + SessionDetails + + FirewallCreate + FirewallEdit + FirewallRemove + + PublicKeyCreate + PublicKeyEdit + PublicKeyRemove + + NamespaceUpdate + NamespaceAddMember + NamespaceRemoveMember + NamespaceEditMember + NamespaceEnableSessionRecord + NamespaceDelete + + BillingCreateCustomer + BillingChooseDevices + BillingAddPaymentMethod + BillingUpdatePaymentMethod + BillingRemovePaymentMethod + BillingCancelSubscription + BillingCreateSubscription + BillingGetPaymentMethod + BillingGetSubscription + + APIKeyCreate + APIKeyUpdate + APIKeyDelete + + ConnectorDelete + ConnectorUpdate + ConnectorSet + + TunnelsCreate + TunnelsDelete +) + +var observerPermissions = []Permission{ + DeviceConnect, + DeviceDetails, + + SessionDetails, +} + +var operatorPermissions = []Permission{ + DeviceAccept, + DeviceReject, + DeviceConnect, + DeviceRename, + DeviceDetails, + DeviceUpdate, + + TagCreate, + TagUpdate, + TagDelete, + + SessionDetails, +} + +var adminPermissions = []Permission{ + DeviceAccept, + DeviceReject, + DeviceRemove, + DeviceConnect, + DeviceRename, + DeviceDetails, + DeviceUpdate, + + TagCreate, + TagUpdate, + TagDelete, + + SessionPlay, + SessionClose, + SessionRemove, + SessionDetails, + + FirewallCreate, + FirewallEdit, + FirewallRemove, + + PublicKeyCreate, + PublicKeyEdit, + PublicKeyRemove, + + 
NamespaceUpdate, + NamespaceAddMember, + NamespaceRemoveMember, + NamespaceEditMember, + NamespaceEnableSessionRecord, + + APIKeyCreate, + APIKeyUpdate, + APIKeyDelete, + + ConnectorDelete, + ConnectorUpdate, + ConnectorSet, + + TunnelsCreate, + TunnelsDelete, +} + +var ownerPermissions = []Permission{ + DeviceAccept, + DeviceReject, + DeviceRemove, + DeviceConnect, + DeviceRename, + DeviceDetails, + DeviceUpdate, + + TagCreate, + TagUpdate, + TagDelete, + + SessionPlay, + SessionClose, + SessionRemove, + SessionDetails, + + FirewallCreate, + FirewallEdit, + FirewallRemove, + + PublicKeyCreate, + PublicKeyEdit, + PublicKeyRemove, + + NamespaceUpdate, + NamespaceAddMember, + NamespaceRemoveMember, + NamespaceEditMember, + NamespaceEnableSessionRecord, + NamespaceDelete, + + BillingCreateCustomer, + BillingChooseDevices, + BillingAddPaymentMethod, + BillingUpdatePaymentMethod, + BillingRemovePaymentMethod, + BillingCancelSubscription, + BillingCreateSubscription, + BillingGetSubscription, + + APIKeyCreate, + APIKeyUpdate, + APIKeyDelete, + + ConnectorDelete, + ConnectorUpdate, + ConnectorSet, + + TunnelsCreate, + TunnelsDelete, +} diff --git a/pkg/api/authorizer/role.go b/pkg/api/authorizer/role.go new file mode 100644 index 00000000000..8045cf8e36f --- /dev/null +++ b/pkg/api/authorizer/role.go @@ -0,0 +1,105 @@ +package authorizer + +import "slices" + +// Role defines a user access level. +type Role string + +const ( + // RoleInvalid represents an invalid role. Any operation with this role will + // be rejected. + RoleInvalid Role = "" + // RoleObserver represents a namespace observer. An observer can only connect to a + // device and retrieve device and session details. + RoleObserver Role = "observer" + // RoleOperator represents a namespace operator. An operator has only device-related + // permissions, excluding the [DeviceRemove] permission. An operator also has the + // [SessionDetails] permission. 
+ RoleOperator Role = "operator" + // RoleAdministrator represents a namespace administrator. An administrator has + // similar permissions to [RoleOwner] but cannot delete the namespace. They also do + // not have permission for any billing-related actions. + RoleAdministrator Role = "administrator" + // RoleOwner represents a namespace owner. The owner has all permissions. + RoleOwner Role = "owner" +) + +// RoleFromString returns the Role corresponding to the given string. +// If the string is not a valid role, it returns [RoleInvalid]. +func RoleFromString(str string) Role { + switch str { + case "owner": + return RoleOwner + case "administrator": + return RoleAdministrator + case "operator": + return RoleOperator + case "observer": + return RoleObserver + default: + return RoleInvalid + } +} + +// String converts the given role to its corresponding string. +// If the role is not a valid one, it returns an empty string. +func (r Role) String() string { + switch r { + case RoleOwner: + return "owner" + case RoleAdministrator: + return "administrator" + case RoleOperator: + return "operator" + case RoleObserver: + return "observer" + default: + return "" + } +} + +// code converts the given role to its corresponding integer. +// If the role is not a valid one, it returns 0. +func (r Role) code() int { + switch r { + case RoleOwner: + return 4 + case RoleAdministrator: + return 3 + case RoleOperator: + return 2 + case RoleObserver: + return 1 + default: + return 0 + } +} + +// Permissions returns all permissions associated with the role r. +// If the role is [RoleInvalid], it returns an empty slice. 
+func (r Role) Permissions() []Permission { + permissions := make([]Permission, 0) + switch r { + case RoleOwner: + permissions = ownerPermissions + case RoleAdministrator: + permissions = adminPermissions + case RoleOperator: + permissions = operatorPermissions + case RoleObserver: + permissions = observerPermissions + } + + return permissions +} + +// HasPermission reports whether the role r has the specified permission. +func (r Role) HasPermission(permission Permission) bool { + return slices.Contains(r.Permissions(), permission) +} + +// HasAuthority reports whether the role r has greater or equal authority compared to the passive role. +// It always returns false if either role is invalid or if the passive role is [RoleOwner]. +func (r Role) HasAuthority(passive Role) bool { + return passive != RoleOwner && r.code() >= passive.code() +} diff --git a/pkg/api/authorizer/role_test.go b/pkg/api/authorizer/role_test.go new file mode 100644 index 00000000000..dd82875f7bb --- /dev/null +++ b/pkg/api/authorizer/role_test.go @@ -0,0 +1,232 @@ +package authorizer_test + +import ( + "testing" + + "github.com/shellhub-io/shellhub/pkg/api/authorizer" + "github.com/stretchr/testify/require" +) + +func TestRoleFromString(t *testing.T) { + cases := []struct { + description string + role string + expected authorizer.Role + }{ + { + description: "fails with invalid roles", + role: "N/A", + expected: authorizer.RoleInvalid, + }, + { + description: "succeeds with 'owner'", + role: "owner", + expected: authorizer.RoleOwner, + }, + { + description: "succeeds with 'administrator'", + role: "administrator", + expected: authorizer.RoleAdministrator, + }, + { + description: "succeeds with 'operator'", + role: "operator", + expected: authorizer.RoleOperator, + }, + { + description: "succeeds with 'observer'", + role: "observer", + expected: authorizer.RoleObserver, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + require.Equal(tt, tc.expected, 
authorizer.RoleFromString(tc.role)) + }) + } +} + +func TestRolePermissions(t *testing.T) { + cases := []struct { + description string + role authorizer.Role + expected []authorizer.Permission + }{ + { + description: "fails with invalid roles", + role: authorizer.RoleInvalid, + expected: []authorizer.Permission{}, + }, + { + description: "succeeds with 'owner'", + role: authorizer.RoleOwner, + expected: []authorizer.Permission{ + authorizer.DeviceAccept, + authorizer.DeviceReject, + authorizer.DeviceRemove, + authorizer.DeviceConnect, + authorizer.DeviceRename, + authorizer.DeviceDetails, + authorizer.DeviceUpdate, + authorizer.TagCreate, + authorizer.TagUpdate, + authorizer.TagDelete, + authorizer.SessionPlay, + authorizer.SessionClose, + authorizer.SessionRemove, + authorizer.SessionDetails, + authorizer.FirewallCreate, + authorizer.FirewallEdit, + authorizer.FirewallRemove, + authorizer.PublicKeyCreate, + authorizer.PublicKeyEdit, + authorizer.PublicKeyRemove, + authorizer.NamespaceUpdate, + authorizer.NamespaceAddMember, + authorizer.NamespaceRemoveMember, + authorizer.NamespaceEditMember, + authorizer.NamespaceEnableSessionRecord, + authorizer.NamespaceDelete, + authorizer.BillingCreateCustomer, + authorizer.BillingChooseDevices, + authorizer.BillingAddPaymentMethod, + authorizer.BillingUpdatePaymentMethod, + authorizer.BillingRemovePaymentMethod, + authorizer.BillingCancelSubscription, + authorizer.BillingCreateSubscription, + authorizer.BillingGetSubscription, + authorizer.APIKeyCreate, + authorizer.APIKeyUpdate, + authorizer.APIKeyDelete, + authorizer.ConnectorDelete, + authorizer.ConnectorUpdate, + authorizer.ConnectorSet, + authorizer.TunnelsCreate, + authorizer.TunnelsDelete, + }, + }, + { + description: "succeeds with 'administrator'", + role: authorizer.RoleAdministrator, + expected: []authorizer.Permission{ + authorizer.DeviceAccept, + authorizer.DeviceReject, + authorizer.DeviceRemove, + authorizer.DeviceConnect, + authorizer.DeviceRename, + 
authorizer.DeviceDetails, + authorizer.DeviceUpdate, + authorizer.TagCreate, + authorizer.TagUpdate, + authorizer.TagDelete, + authorizer.SessionPlay, + authorizer.SessionClose, + authorizer.SessionRemove, + authorizer.SessionDetails, + authorizer.FirewallCreate, + authorizer.FirewallEdit, + authorizer.FirewallRemove, + authorizer.PublicKeyCreate, + authorizer.PublicKeyEdit, + authorizer.PublicKeyRemove, + authorizer.NamespaceUpdate, + authorizer.NamespaceAddMember, + authorizer.NamespaceRemoveMember, + authorizer.NamespaceEditMember, + authorizer.NamespaceEnableSessionRecord, + authorizer.APIKeyCreate, + authorizer.APIKeyUpdate, + authorizer.APIKeyDelete, + authorizer.ConnectorDelete, + authorizer.ConnectorUpdate, + authorizer.ConnectorSet, + authorizer.TunnelsCreate, + authorizer.TunnelsDelete, + }, + }, + { + description: "succeeds with 'operator'", + role: authorizer.RoleOperator, + expected: []authorizer.Permission{ + authorizer.DeviceAccept, + authorizer.DeviceReject, + authorizer.DeviceConnect, + authorizer.DeviceRename, + authorizer.DeviceDetails, + authorizer.DeviceUpdate, + authorizer.TagCreate, + authorizer.TagUpdate, + authorizer.TagDelete, + authorizer.SessionDetails, + }, + }, + { + description: "succeeds with 'observer'", + role: authorizer.RoleObserver, + expected: []authorizer.Permission{ + authorizer.DeviceConnect, + authorizer.DeviceDetails, + authorizer.SessionDetails, + }, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + require.Equal(tt, tc.expected, tc.role.Permissions()) + }) + } +} + +func TestRolePreferences(t *testing.T) { + cases := []struct { + description string + role authorizer.Role + greater []authorizer.Role + less []authorizer.Role + }{ + { + description: authorizer.RoleInvalid.String(), + role: authorizer.RoleInvalid, + greater: []authorizer.Role{authorizer.RoleOwner, authorizer.RoleAdministrator, authorizer.RoleOperator, authorizer.RoleObserver}, + less: []authorizer.Role{}, + }, + { + 
description: authorizer.RoleOwner.String(), + role: authorizer.RoleOwner, + greater: []authorizer.Role{}, + less: []authorizer.Role{authorizer.RoleAdministrator, authorizer.RoleOperator, authorizer.RoleObserver}, + }, + { + description: authorizer.RoleAdministrator.String(), + role: authorizer.RoleAdministrator, + greater: []authorizer.Role{authorizer.RoleOwner}, + less: []authorizer.Role{authorizer.RoleOperator, authorizer.RoleObserver}, + }, + { + description: authorizer.RoleOperator.String(), + role: authorizer.RoleOperator, + greater: []authorizer.Role{authorizer.RoleOwner, authorizer.RoleAdministrator}, + less: []authorizer.Role{authorizer.RoleObserver}, + }, + { + description: authorizer.RoleObserver.String(), + role: authorizer.RoleObserver, + greater: []authorizer.Role{authorizer.RoleOwner, authorizer.RoleAdministrator, authorizer.RoleOperator}, + less: []authorizer.Role{}, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(tt *testing.T) { + for _, r := range tc.greater { + require.Equal(tt, false, tc.role.HasAuthority(r)) + } + + for _, r := range tc.less { + require.Equal(tt, true, tc.role.HasAuthority(r)) + } + }) + } +} diff --git a/pkg/api/client/client.go b/pkg/api/client/client.go index e52f540dec6..b7c41098ce8 100644 --- a/pkg/api/client/client.go +++ b/pkg/api/client/client.go @@ -5,13 +5,16 @@ import ( "errors" "fmt" "math" + "math/rand/v2" "net" "net/http" "net/url" + "strconv" + "time" resty "github.com/go-resty/resty/v2" + "github.com/shellhub-io/shellhub/pkg/api/client/reverser" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/shellhub-io/shellhub/pkg/revdial" log "github.com/sirupsen/logrus" ) @@ -25,7 +28,12 @@ type publicAPI interface { Endpoints() (*models.Endpoints, error) AuthDevice(req *models.DeviceAuthRequest) (*models.DeviceAuthResponse, error) AuthPublicKey(req *models.PublicKeyAuthRequest, token string) (*models.PublicKeyAuthResponse, error) - NewReverseListener(ctx context.Context, token string) 
(*revdial.Listener, error) + // NewReverseListenerV1 creates a new reverse listener to be used by the Agent to connect to ShellHub's SSH server + // using RevDial protocol. + NewReverseListenerV1(ctx context.Context, token string, path string) (net.Listener, error) + // NewReverseListenerV2 creates a new reverse listener to be used by the Agent to connect to ShellHub's SSH server + // using Yamux protocol. + NewReverseListenerV2(ctx context.Context, token string, path string, cfg *ReverseListenerV2Config) (net.Listener, error) } //go:generate mockery --name=Client --filename=client.go @@ -41,7 +49,7 @@ type client struct { http *resty.Client logger *log.Logger // reverser is used to create a reverse listener to Agent from ShellHub's SSH server. - reverser IReverser + reverser reverser.Reverser } var ErrParseAddress = fmt.Errorf("could not parse the address to the required format") @@ -55,6 +63,24 @@ func NewClient(address string, opts ...Opt) (Client, error) { return nil, errors.Join(ErrParseAddress, err) } + const RetryAfterHeader string = "Retry-After" + + // MaxRetryWaitTime is the default value for wait time between retries. 
+ const MaxRetryWaitTime time.Duration = 1 * time.Hour + + randomWaitTimeSecs := func() time.Duration { + const MinRetryAfterSecs int = 5 + const MaxRetryAfterSecs int = 65 + + t := time.Duration(rand.IntN(MaxRetryAfterSecs-MinRetryAfterSecs)+MinRetryAfterSecs) * time.Second //nolint:gosec + + log.WithFields(log.Fields{ + "retry_after": t, + }).Warn("retrying request after a random time period") + + return t + } + client := new(client) client.http = resty.New() client.http.SetRetryCount(math.MaxInt32) @@ -62,13 +88,27 @@ client.http.SetBaseURL(uri.String()) client.http.AddRetryCondition(func(r *resty.Response, err error) bool { if _, ok := err.(net.Error); ok { + log.WithFields(log.Fields{ + "url": r.Request.URL, + }).WithError(err).Error("network error") + return true } - if r.StatusCode() >= http.StatusInternalServerError && r.StatusCode() != http.StatusNotImplemented { + switch { + case r.StatusCode() == http.StatusTooManyRequests: + log.WithFields(log.Fields{ + "status_code": r.StatusCode(), + "url": r.Request.URL, + "data": r.String(), + }).Warn("too many requests") + + return true + case r.StatusCode() >= http.StatusInternalServerError && r.StatusCode() != http.StatusNotImplemented: log.WithFields(log.Fields{ + "status_code": r.StatusCode(), + "url": r.Request.URL, + "data": r.String(), + }).Warn("failed to achieve the server") return true @@ -76,6 +116,34 @@ return false }) + client.http.SetRetryAfter(func(c *resty.Client, r *resty.Response) (time.Duration, error) { + switch r.StatusCode() { + case http.StatusTooManyRequests, http.StatusServiceUnavailable: + retryAfterHeader := r.Header().Get(RetryAfterHeader) + if retryAfterHeader == "" { + return randomWaitTimeSecs(), nil + } + + // NOTE: The `Retry-After` supports delay in seconds and a date time, but currently we will support only + // one of them. 
+ // https://developer.mozilla.org/en-US/docs/Web/HTTP/Reference/Headers/Retry-After + retryAfterSeconds, err := strconv.Atoi(retryAfterHeader) + if err != nil { + return randomWaitTimeSecs(), err + } + + log.WithFields(log.Fields{ + "status": r.StatusCode(), + "retry_after": retryAfterSeconds, + "url": r.Request.URL, + }).Warn("retrying request after a defined time period") + + return time.Duration(retryAfterSeconds) * time.Second, nil + default: + return randomWaitTimeSecs(), nil + } + }) + client.http.SetRetryMaxWaitTime(MaxRetryWaitTime) if client.logger != nil { client.http.SetLogger(&LeveledLogger{client.logger}) diff --git a/pkg/api/client/client_common_test.go b/pkg/api/client/client_common_test.go index 796268b87d7..11fec1152bf 100644 --- a/pkg/api/client/client_common_test.go +++ b/pkg/api/client/client_common_test.go @@ -24,7 +24,7 @@ func TestListDevices(t *testing.T) { }{ { description: "success to list devices when its list is empty", - requiredMocks: func(client *http.Client) { + requiredMocks: func(_ *http.Client) { responder, _ := httpmock.NewJsonResponder(200, []models.Device{{}}) httpmock.RegisterResponder("GET", "/api/devices", responder) }, @@ -35,7 +35,7 @@ func TestListDevices(t *testing.T) { }, { description: "success to list devices when its list is not empty", - requiredMocks: func(client *http.Client) { + requiredMocks: func(_ *http.Client) { responder, _ := httpmock.NewJsonResponder(200, []models.Device{ { UID: "3a471bd84c88b28c4e4f8e27caee40e7b14798325e6dd85aa62d54e27fd11117", @@ -60,7 +60,7 @@ func TestListDevices(t *testing.T) { }, { description: "success to list devices after retry", - requiredMocks: func(client *http.Client) { + requiredMocks: func(_ *http.Client) { fail := httpmock.NewErrorResponder(errors.New("error on request")) success, _ := httpmock.NewJsonResponder(200, []models.Device{ { @@ -91,7 +91,7 @@ func TestListDevices(t *testing.T) { }, { description: "failed when resource is not found", - requiredMocks: func(client 
*http.Client) { + requiredMocks: func(_ *http.Client) { responder, _ := httpmock.NewJsonResponder(404, nil) httpmock.RegisterResponder("GET", "/api/devices", responder) }, @@ -102,7 +102,7 @@ func TestListDevices(t *testing.T) { }, { description: "failed when request is missformated", - requiredMocks: func(client *http.Client) { + requiredMocks: func(_ *http.Client) { responder, _ := httpmock.NewJsonResponder(400, nil) httpmock.RegisterResponder("GET", "/api/devices", responder) }, @@ -113,7 +113,7 @@ func TestListDevices(t *testing.T) { }, { description: "failed when request return an unmaped status code", - requiredMocks: func(client *http.Client) { + requiredMocks: func(_ *http.Client) { responder, _ := httpmock.NewJsonResponder(418, nil) httpmock.RegisterResponder("GET", "/api/devices", responder) }, diff --git a/pkg/api/client/client_public.go b/pkg/api/client/client_public.go index 2ff58f1bc85..b194667e317 100644 --- a/pkg/api/client/client_public.go +++ b/pkg/api/client/client_public.go @@ -3,10 +3,16 @@ package client import ( "context" "errors" + "net" + "net/http" + "net/url" + "os" + "time" resty "github.com/go-resty/resty/v2" + "github.com/hashicorp/yamux" "github.com/shellhub-io/shellhub/pkg/models" - "github.com/shellhub-io/shellhub/pkg/revdial" + "github.com/shellhub-io/shellhub/pkg/wsconnadapter" log "github.com/sirupsen/logrus" ) @@ -31,7 +37,7 @@ func (c *client) AuthDevice(req *models.DeviceAuthRequest) (*models.DeviceAuthRe var res *models.DeviceAuthResponse response, err := c.http.R(). 
- AddRetryCondition(func(r *resty.Response, err error) bool { + AddRetryCondition(func(r *resty.Response, _ error) bool { identity := func(mac, hostname string) string { if mac != "" { return mac @@ -45,6 +51,7 @@ func (c *client) AuthDevice(req *models.DeviceAuthRequest) (*models.DeviceAuthRe "tenant_id": req.TenantID, "identity": identity(req.Identity.MAC, req.Hostname), "status_code": r.StatusCode(), + "data": r.String(), }).Warn("failed to authenticate device") return true @@ -102,22 +109,166 @@ func (c *client) AuthPublicKey(req *models.PublicKeyAuthRequest, token string) ( return res, nil } -// NewReverseListener creates a new reverse listener connection for the Agent from ShellHub's SSH server. -// -// Every time the ShellHub's SSH server receives a new connection to the Agent, the server sends that connection -// through this listener to the device, but only if it is accepted by the server. -// -// To obtain this listener from the server, the Agent needs to authenticates it using the token provided, and getting a -// reverse authenticated connection, after that, it dials the server again for a new reverse connection on ShellHub's -// SSH tunnel list. -func (c *client) NewReverseListener(ctx context.Context, token string) (*revdial.Listener, error) { +// NewReverseListener creates a new reverse listener connection to ShellHub's server. This listener receives the SSH +// requests coming from the ShellHub server. Only authenticated devices can obtain a listener connection. +func (c *client) NewReverseListenerV1(ctx context.Context, token string, path string) (net.Listener, error) { if token == "" { return nil, errors.New("token is empty") } - if err := c.reverser.Auth(ctx, token); err != nil { + if err := c.reverser.Auth(ctx, token, path); err != nil { return nil, err } return c.reverser.NewListener() } + +type ReverseListenerV2Config struct { + // AcceptBacklog is used to limit how many streams may be + // waiting an accept. 
+ AcceptBacklog int `json:"yamux_accept_backlog"` + + // EnableKeepAlive is used to send periodic keep alive + // messages using a ping. + EnableKeepAlive bool `json:"yamux_enable_keep_alive"` + + // KeepAliveInterval is how often to perform the keep alive + KeepAliveInterval time.Duration `json:"yamux_keep_alive_interval"` + + // ConnectionWriteTimeout is meant to be a "safety valve" timeout after + // which we will suspect a problem with the underlying connection and + // close it. This is only applied to writes, where there's generally + // an expectation that things will move along quickly. + ConnectionWriteTimeout time.Duration `json:"yamux_connection_write_timeout"` + + // MaxStreamWindowSize is used to control the maximum + // window size that we allow for a stream. + MaxStreamWindowSize uint32 `json:"yamux_max_stream_window_size"` + + // StreamOpenTimeout is the maximum amount of time that a stream will + // be allowed to remain in pending state while waiting for an ack from the peer. + // Once the timeout is reached the session will be gracefully closed. + // A zero value disables the StreamOpenTimeout allowing unbounded + // blocking on OpenStream calls. + StreamOpenTimeout time.Duration `json:"yamux_stream_open_timeout"` + + // StreamCloseTimeout is the maximum time that a stream will be allowed to + // be in a half-closed state when `Close` is called before forcibly + // closing the connection. Forcibly closed connections will empty the + // receive buffer, drop any future packets received for that stream, + // and send a RST to the remote side. 
+ StreamCloseTimeout time.Duration `json:"yamux_stream_close_timeout"` +} + +var DefaultReverseListenerV2Config = ReverseListenerV2Config{ + AcceptBacklog: 256, + EnableKeepAlive: true, + KeepAliveInterval: 35 * time.Second, + ConnectionWriteTimeout: 15 * time.Second, + MaxStreamWindowSize: 256 * 1024, + StreamCloseTimeout: 5 * time.Minute, + StreamOpenTimeout: 75 * time.Second, +} + +// NewReverseV2ConfigFromMap creates a new Config from a map[string]any received from auth data from the server +// or returns the default config if the map is nil. If a key is missing, the default value is used. +func NewReverseV2ConfigFromMap(m map[string]any) *ReverseListenerV2Config { + cfg := DefaultReverseListenerV2Config + + if v, ok := m["yamux_accept_backlog"].(int); ok { + cfg.AcceptBacklog = v + } + + if v, ok := m["yamux_enable_keep_alive"].(bool); ok { + cfg.EnableKeepAlive = v + } + + if v, ok := m["yamux_keep_alive_interval"].(time.Duration); ok { + cfg.KeepAliveInterval = v + } + + if v, ok := m["yamux_connection_write_timeout"].(time.Duration); ok { + cfg.ConnectionWriteTimeout = v + } + + if v, ok := m["yamux_max_stream_window_size"].(uint32); ok { + cfg.MaxStreamWindowSize = v + } + + if v, ok := m["yamux_stream_open_timeout"].(time.Duration); ok { + cfg.StreamOpenTimeout = v + } + + if v, ok := m["yamux_stream_close_timeout"].(time.Duration); ok { + cfg.StreamCloseTimeout = v + } + + return &cfg +} + +func YamuxConfigFromReverseListenerV2(cfg *ReverseListenerV2Config) *yamux.Config { + if cfg == nil { + cfg = &DefaultReverseListenerV2Config + } + + return &yamux.Config{ + AcceptBacklog: cfg.AcceptBacklog, + EnableKeepAlive: cfg.EnableKeepAlive, + KeepAliveInterval: cfg.KeepAliveInterval, + ConnectionWriteTimeout: cfg.ConnectionWriteTimeout, + MaxStreamWindowSize: cfg.MaxStreamWindowSize, + StreamCloseTimeout: cfg.StreamCloseTimeout, + StreamOpenTimeout: cfg.StreamOpenTimeout, + // NOTE: LogOutput is required, and without it yamux will fail to create the session. 
+ LogOutput: os.Stderr, + } +} + +func (c *client) NewReverseListenerV2(ctx context.Context, token string, path string, cfg *ReverseListenerV2Config) (net.Listener, error) { + if token == "" { + return nil, errors.New("token is empty") + } + + u, err := url.JoinPath(c.http.BaseURL, path) + if err != nil { + return nil, err + } + + wsconn, _, err := DialContext(ctx, u, http.Header{ + "Authorization": []string{"Bearer " + token}, + }) + if err != nil { + return nil, err + } + + var listener *yamux.Session + + conn := wsconnadapter.New(wsconn) + + listener, err = yamux.Server(conn, YamuxConfigFromReverseListenerV2(cfg)) + if err != nil { + log.WithError(err).WithFields(log.Fields{ + "accept_backlog": cfg.AcceptBacklog, + "enable_keep_alive": cfg.EnableKeepAlive, + "keep_alive_interval": cfg.KeepAliveInterval, + "connection_write_timeout": cfg.ConnectionWriteTimeout, + "max_stream_window_size": cfg.MaxStreamWindowSize, + "stream_close_timeout": cfg.StreamCloseTimeout, + "stream_open_timeout": cfg.StreamOpenTimeout, + }).Error("failed to create muxed session") + + // NOTE: If we fail to create the session, we should try again with the [DefaultReverseListenerV2Config] as the client + // could be using different settings. 
+ log.WithError(err).Warning("trying to create muxed session with default config") + listener, err = yamux.Server(conn, YamuxConfigFromReverseListenerV2(&DefaultReverseListenerV2Config)) + if err != nil { + log.WithError(err).Error("failed to create muxed session with default config") + + return nil, err + } + + log.WithError(err).Warning("muxed session created with default config due to error with custom config") + } + + return listener, err +} diff --git a/pkg/api/client/client_public_test.go b/pkg/api/client/client_public_test.go index cd429b371cd..bf991554048 100644 --- a/pkg/api/client/client_public_test.go +++ b/pkg/api/client/client_public_test.go @@ -9,7 +9,7 @@ import ( "testing" mock "github.com/jarcoal/httpmock" - reversermock "github.com/shellhub-io/shellhub/pkg/api/client/mocks" + reversermock "github.com/shellhub-io/shellhub/pkg/api/client/reverser/mocks" "github.com/shellhub-io/shellhub/pkg/models" "github.com/shellhub-io/shellhub/pkg/revdial" "github.com/stretchr/testify/assert" @@ -329,7 +329,7 @@ func TestAuthPublicKey(t *testing.T) { Fingerprint: "cd:8a:1b:73:03:47:15:3c:7c:2b:df:5d:b9:64:63:77", Data: `{"Username":"test","Namespace":"namespace"}`, }, - requiredMocks: func(client *http.Client) { + requiredMocks: func(_ *http.Client) { responder, _ := mock.NewJsonResponder(401, nil) mock.RegisterResponder("POST", "/api/auth/ssh", responder) @@ -345,7 +345,7 @@ func TestAuthPublicKey(t *testing.T) { request: &models.PublicKeyAuthRequest{ Fingerprint: "cd:8a:1b:73:03:47:15:3c:7c:2b:df:5d:b9:64:63:77", }, - requiredMocks: func(client *http.Client) { + requiredMocks: func(_ *http.Client) { responder, _ := mock.NewJsonResponder(400, nil) mock.RegisterResponder("POST", "/api/auth/ssh", responder) @@ -362,7 +362,7 @@ func TestAuthPublicKey(t *testing.T) { Fingerprint: "cd:8a:1b:73:03:47:15:3c:7c:2b:df:5d:b9:64:63:77", Data: `{"Username":"test","Namespace":"namespace"}`, }, - requiredMocks: func(client *http.Client) { + requiredMocks: func(_ *http.Client) 
{ responder, _ := mock.NewJsonResponder(404, nil) mock.RegisterResponder("POST", "/api/auth/ssh", responder) @@ -379,7 +379,7 @@ func TestAuthPublicKey(t *testing.T) { Fingerprint: "cd:8a:1b:73:03:47:15:3c:7c:2b:df:5d:b9:64:63:77", Data: `{"Username":"test","Namespace":"namespace"}`, }, - requiredMocks: func(client *http.Client) { + requiredMocks: func(_ *http.Client) { responder, _ := mock.NewJsonResponder(200, models.PublicKeyAuthResponse{ Signature: "hgihFKUWAr1QB87Eb7FiBu9pMjTLLBgLXEqNIYd4S+UoOZ7xqozEMds9EvwB1TwCjMa+uAmZsB7qtARVvoVPrUNp/OBQ7iKzV2+GpIpRFfEqa0ugQBf+XQBfo/irDnH/wAixgoqC3KUyIk+nQxwz7wvgVDB0WTxD2eK9TzyD3WIOSVGlPWNytx7HTP0TTN5EJ0tjj/H4v1F9t+8Nd3ZGUz0z73rZ3qKbzXBJBkRyyDDtWo9lGIOcz5e4LcgojNVxznsHDXJ/2gBnRL6JHZQm6v3gCpzZRRXA+cagSSuJzWQwwDmwydfiAJsbSPeen4+X+IEkfrXBW1KHMRsZh1AtTw==", }) @@ -415,7 +415,7 @@ func TestAuthPublicKey(t *testing.T) { } func TestReverseListener(t *testing.T) { - mock := new(reversermock.IReverser) + mock := new(reversermock.Reverser) tests := []struct { description string @@ -430,18 +430,18 @@ func TestReverseListener(t *testing.T) { expected: errors.New("token is empty"), }, { - description: "fail when connot auth the agent on the SSH server", + description: "fail when cannot auth the agent on the SSH server", token: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c", requiredMocks: func() { - mock.On("Auth", context.Background(), "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c").Return(errors.New("")).Once() + mock.On("Auth", context.Background(), "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c", "").Return(errors.New("")).Once() }, expected: errors.New(""), }, { - description: "fail when connot create a new 
reverse listener", + description: "fail when cannot create a new reverse listener", token: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c", requiredMocks: func() { - mock.On("Auth", context.Background(), "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c").Return(nil).Once() + mock.On("Auth", context.Background(), "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c", "").Return(nil).Once() mock.On("NewListener").Return(nil, errors.New("")).Once() }, @@ -451,7 +451,7 @@ func TestReverseListener(t *testing.T) { description: "success to create a new reverse listener", token: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c", requiredMocks: func() { - mock.On("Auth", context.Background(), "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c").Return(nil).Once() + mock.On("Auth", context.Background(), "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c", "").Return(nil).Once() mock.On("NewListener").Return(new(revdial.Listener), nil).Once() }, @@ -468,7 +468,7 @@ func TestReverseListener(t *testing.T) { test.requiredMocks() - _, err = cli.NewReverseListener(ctx, test.token) + _, err = cli.NewReverseListenerV1(ctx, test.token, "") assert.Equal(t, err, test.expected) }) } diff --git a/pkg/api/client/client_test.go b/pkg/api/client/client_test.go index 13f187fe332..737d5e84f91 100644 --- a/pkg/api/client/client_test.go +++ 
b/pkg/api/client/client_test.go @@ -22,88 +22,88 @@ func TestNewClient(t *testing.T) { description: "failed to create when address is invalid", address: "localhost", opts: []Opt{}, - requiredMocks: func(client *http.Client) {}, + requiredMocks: func(_ *http.Client) {}, err: ErrParseAddress, }, { description: "failed to create when some option return error", address: "http://localhost", opts: []Opt{ - func(c *client) error { + func(_ *client) error { return ErrFromOption }, }, - requiredMocks: func(client *http.Client) {}, + requiredMocks: func(_ *http.Client) {}, err: ErrFromOption, }, { description: "success to create a new client with 127.0.0.1 in http", address: "http://127.0.0.1", opts: []Opt{}, - requiredMocks: func(client *http.Client) {}, + requiredMocks: func(_ *http.Client) {}, err: nil, }, { description: "success to create a new client with 127.0.0.1 in https", address: "https://127.0.0.1", opts: []Opt{}, - requiredMocks: func(client *http.Client) {}, + requiredMocks: func(_ *http.Client) {}, err: nil, }, { description: "success to create a new client with 127.0.0.1 in http with port", address: "http://127.0.0.1:80", opts: []Opt{}, - requiredMocks: func(client *http.Client) {}, + requiredMocks: func(_ *http.Client) {}, err: nil, }, { description: "success to create a new client with 127.0.0.1 in https with port", address: "https://127.0.0.1:443", opts: []Opt{}, - requiredMocks: func(client *http.Client) {}, + requiredMocks: func(_ *http.Client) {}, err: nil, }, { description: "success to create a new client with localhost in http", address: "http://localhost", opts: []Opt{}, - requiredMocks: func(client *http.Client) {}, + requiredMocks: func(_ *http.Client) {}, err: nil, }, { description: "success to create a new client with localhost in https", address: "https://localhost", opts: []Opt{}, - requiredMocks: func(client *http.Client) {}, + requiredMocks: func(_ *http.Client) {}, err: nil, }, { description: "success to create a new client with localhost in 
http with port", address: "http://localhost:80", opts: []Opt{}, - requiredMocks: func(client *http.Client) {}, + requiredMocks: func(_ *http.Client) {}, err: nil, }, { description: "success to create a new client with localhost in https with port", address: "https://localhost:443", opts: []Opt{}, - requiredMocks: func(client *http.Client) {}, + requiredMocks: func(_ *http.Client) {}, err: nil, }, { description: "success to create a new client with cloud.shellhub.io in https", address: "https://cloud.shellhub.io", opts: []Opt{}, - requiredMocks: func(client *http.Client) {}, + requiredMocks: func(_ *http.Client) {}, err: nil, }, { description: "success to create a new client with cloud.shellhub.io in https with port", address: "https://cloud.shellhub.io:443", opts: []Opt{}, - requiredMocks: func(client *http.Client) {}, + requiredMocks: func(_ *http.Client) {}, err: nil, }, } diff --git a/pkg/api/client/mocks/client.go b/pkg/api/client/mocks/client.go index 9a3a64e97f1..eb6959bb9e6 100644 --- a/pkg/api/client/mocks/client.go +++ b/pkg/api/client/mocks/client.go @@ -1,14 +1,17 @@ -// Code generated by mockery v2.20.0. DO NOT EDIT. +// Code generated by mockery v2.53.3. DO NOT EDIT. 
package mocks import ( context "context" - models "github.com/shellhub-io/shellhub/pkg/models" + client "github.com/shellhub-io/shellhub/pkg/api/client" + mock "github.com/stretchr/testify/mock" - revdial "github.com/shellhub-io/shellhub/pkg/revdial" + models "github.com/shellhub-io/shellhub/pkg/models" + + net "net" ) // Client is an autogenerated mock type for the Client type @@ -20,6 +23,10 @@ type Client struct { func (_m *Client) AuthDevice(req *models.DeviceAuthRequest) (*models.DeviceAuthResponse, error) { ret := _m.Called(req) + if len(ret) == 0 { + panic("no return value specified for AuthDevice") + } + var r0 *models.DeviceAuthResponse var r1 error if rf, ok := ret.Get(0).(func(*models.DeviceAuthRequest) (*models.DeviceAuthResponse, error)); ok { @@ -46,6 +53,10 @@ func (_m *Client) AuthDevice(req *models.DeviceAuthRequest) (*models.DeviceAuthR func (_m *Client) AuthPublicKey(req *models.PublicKeyAuthRequest, token string) (*models.PublicKeyAuthResponse, error) { ret := _m.Called(req, token) + if len(ret) == 0 { + panic("no return value specified for AuthPublicKey") + } + var r0 *models.PublicKeyAuthResponse var r1 error if rf, ok := ret.Get(0).(func(*models.PublicKeyAuthRequest, string) (*models.PublicKeyAuthResponse, error)); ok { @@ -68,10 +79,14 @@ func (_m *Client) AuthPublicKey(req *models.PublicKeyAuthRequest, token string) return r0, r1 } -// Endpoints provides a mock function with given fields: +// Endpoints provides a mock function with no fields func (_m *Client) Endpoints() (*models.Endpoints, error) { ret := _m.Called() + if len(ret) == 0 { + panic("no return value specified for Endpoints") + } + var r0 *models.Endpoints var r1 error if rf, ok := ret.Get(0).(func() (*models.Endpoints, error)); ok { @@ -98,6 +113,10 @@ func (_m *Client) Endpoints() (*models.Endpoints, error) { func (_m *Client) GetDevice(uid string) (*models.Device, error) { ret := _m.Called(uid) + if len(ret) == 0 { + panic("no return value specified for GetDevice") + } + var 
r0 *models.Device var r1 error if rf, ok := ret.Get(0).(func(string) (*models.Device, error)); ok { @@ -124,6 +143,10 @@ func (_m *Client) GetDevice(uid string) (*models.Device, error) { func (_m *Client) GetInfo(agentVersion string) (*models.Info, error) { ret := _m.Called(agentVersion) + if len(ret) == 0 { + panic("no return value specified for GetInfo") + } + var r0 *models.Info var r1 error if rf, ok := ret.Get(0).(func(string) (*models.Info, error)); ok { @@ -146,10 +169,14 @@ func (_m *Client) GetInfo(agentVersion string) (*models.Info, error) { return r0, r1 } -// ListDevices provides a mock function with given fields: +// ListDevices provides a mock function with no fields func (_m *Client) ListDevices() ([]models.Device, error) { ret := _m.Called() + if len(ret) == 0 { + panic("no return value specified for ListDevices") + } + var r0 []models.Device var r1 error if rf, ok := ret.Get(0).(func() ([]models.Device, error)); ok { @@ -172,25 +199,29 @@ func (_m *Client) ListDevices() ([]models.Device, error) { return r0, r1 } -// NewReverseListener provides a mock function with given fields: ctx, token -func (_m *Client) NewReverseListener(ctx context.Context, token string) (*revdial.Listener, error) { - ret := _m.Called(ctx, token) +// NewReverseListenerV1 provides a mock function with given fields: ctx, token, path +func (_m *Client) NewReverseListenerV1(ctx context.Context, token string, path string) (net.Listener, error) { + ret := _m.Called(ctx, token, path) + + if len(ret) == 0 { + panic("no return value specified for NewReverseListenerV1") + } - var r0 *revdial.Listener + var r0 net.Listener var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) (*revdial.Listener, error)); ok { - return rf(ctx, token) + if rf, ok := ret.Get(0).(func(context.Context, string, string) (net.Listener, error)); ok { + return rf(ctx, token, path) } - if rf, ok := ret.Get(0).(func(context.Context, string) *revdial.Listener); ok { - r0 = rf(ctx, token) + if rf, ok 
:= ret.Get(0).(func(context.Context, string, string) net.Listener); ok { + r0 = rf(ctx, token, path) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).(*revdial.Listener) + r0 = ret.Get(0).(net.Listener) } } - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, token) + if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok { + r1 = rf(ctx, token, path) } else { r1 = ret.Error(1) } @@ -198,13 +229,42 @@ func (_m *Client) NewReverseListener(ctx context.Context, token string) (*revdia return r0, r1 } -type mockConstructorTestingTNewClient interface { - mock.TestingT - Cleanup(func()) +// NewReverseListenerV2 provides a mock function with given fields: ctx, token, path, cfg +func (_m *Client) NewReverseListenerV2(ctx context.Context, token string, path string, cfg *client.ReverseListenerV2Config) (net.Listener, error) { + ret := _m.Called(ctx, token, path, cfg) + + if len(ret) == 0 { + panic("no return value specified for NewReverseListenerV2") + } + + var r0 net.Listener + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string, string, *client.ReverseListenerV2Config) (net.Listener, error)); ok { + return rf(ctx, token, path, cfg) + } + if rf, ok := ret.Get(0).(func(context.Context, string, string, *client.ReverseListenerV2Config) net.Listener); ok { + r0 = rf(ctx, token, path, cfg) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(net.Listener) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, string, string, *client.ReverseListenerV2Config) error); ok { + r1 = rf(ctx, token, path, cfg) + } else { + r1 = ret.Error(1) + } + + return r0, r1 } // NewClient creates a new instance of Client. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -func NewClient(t mockConstructorTestingTNewClient) *Client { +// The first argument is typically a *testing.T value. 
+func NewClient(t interface { + mock.TestingT + Cleanup(func()) +}) *Client { mock := &Client{} mock.Mock.Test(t) diff --git a/pkg/api/client/mocks/reverser.go b/pkg/api/client/mocks/reverser.go deleted file mode 100644 index 8c767905f70..00000000000 --- a/pkg/api/client/mocks/reverser.go +++ /dev/null @@ -1,70 +0,0 @@ -// Code generated by mockery v2.20.0. DO NOT EDIT. - -package mocks - -import ( - context "context" - - revdial "github.com/shellhub-io/shellhub/pkg/revdial" - mock "github.com/stretchr/testify/mock" -) - -// IReverser is an autogenerated mock type for the IReverser type -type IReverser struct { - mock.Mock -} - -// Auth provides a mock function with given fields: ctx, token -func (_m *IReverser) Auth(ctx context.Context, token string) error { - ret := _m.Called(ctx, token) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { - r0 = rf(ctx, token) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// NewListener provides a mock function with given fields: -func (_m *IReverser) NewListener() (*revdial.Listener, error) { - ret := _m.Called() - - var r0 *revdial.Listener - var r1 error - if rf, ok := ret.Get(0).(func() (*revdial.Listener, error)); ok { - return rf() - } - if rf, ok := ret.Get(0).(func() *revdial.Listener); ok { - r0 = rf() - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*revdial.Listener) - } - } - - if rf, ok := ret.Get(1).(func() error); ok { - r1 = rf() - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -type mockConstructorTestingTNewIReverser interface { - mock.TestingT - Cleanup(func()) -} - -// NewIReverser creates a new instance of IReverser. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. 
-func NewIReverser(t mockConstructorTestingTNewIReverser) *IReverser { - mock := &IReverser{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/pkg/api/client/options.go b/pkg/api/client/options.go index d12f2fb3d6d..cb693d8094b 100644 --- a/pkg/api/client/options.go +++ b/pkg/api/client/options.go @@ -4,6 +4,7 @@ import ( "net/url" "strconv" + "github.com/shellhub-io/shellhub/pkg/api/client/reverser" "github.com/sirupsen/logrus" ) @@ -66,10 +67,18 @@ func WithLogger(logger *logrus.Logger) Opt { } } -func WithReverser(reverser IReverser) Opt { +func WithReverser(reverser reverser.Reverser) Opt { return func(c *client) error { c.reverser = reverser return nil } } + +func WithVersion(version string) Opt { + return func(c *client) error { + c.http.SetHeader("User-Agent", "shellhub-agent/"+version) + + return nil + } +} diff --git a/pkg/api/client/reverser.go b/pkg/api/client/reverser.go index 00783f37aaf..9c2cb674ca5 100644 --- a/pkg/api/client/reverser.go +++ b/pkg/api/client/reverser.go @@ -8,16 +8,11 @@ import ( "net/url" "github.com/gorilla/websocket" + "github.com/shellhub-io/shellhub/pkg/api/client/reverser" "github.com/shellhub-io/shellhub/pkg/revdial" "github.com/shellhub-io/shellhub/pkg/wsconnadapter" ) -//go:generate mockery --name=IReverser --filename=reverser.go -type IReverser interface { - Auth(ctx context.Context, token string) error - NewListener() (*revdial.Listener, error) -} - type Reverser struct { conn *websocket.Conn // host is the ShellHub's server address. @@ -26,7 +21,7 @@ type Reverser struct { host string } -var _ IReverser = new(Reverser) +var _ reverser.Reverser = new(Reverser) func NewReverser(host string) *Reverser { return &Reverser{ @@ -35,8 +30,8 @@ func NewReverser(host string) *Reverser { } // Auth creates a initial connection to the ShellHub SSH's server and authenticate it with the token received. 
-func (r *Reverser) Auth(ctx context.Context, token string) error { - uri, err := url.JoinPath(r.host, "/ssh/connection") +func (r *Reverser) Auth(ctx context.Context, token string, connPath string) error { + uri, err := url.JoinPath(r.host, connPath) if err != nil { return err } diff --git a/pkg/api/client/reverser/mocks/reverser.go b/pkg/api/client/reverser/mocks/reverser.go new file mode 100644 index 00000000000..a0b8d3aba44 --- /dev/null +++ b/pkg/api/client/reverser/mocks/reverser.go @@ -0,0 +1,77 @@ +// Code generated by mockery v2.53.3. DO NOT EDIT. + +package mocks + +import ( + context "context" + + revdial "github.com/shellhub-io/shellhub/pkg/revdial" + mock "github.com/stretchr/testify/mock" +) + +// Reverser is an autogenerated mock type for the Reverser type +type Reverser struct { + mock.Mock +} + +// Auth provides a mock function with given fields: ctx, token, connPath +func (_m *Reverser) Auth(ctx context.Context, token string, connPath string) error { + ret := _m.Called(ctx, token, connPath) + + if len(ret) == 0 { + panic("no return value specified for Auth") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, string, string) error); ok { + r0 = rf(ctx, token, connPath) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// NewListener provides a mock function with no fields +func (_m *Reverser) NewListener() (*revdial.Listener, error) { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for NewListener") + } + + var r0 *revdial.Listener + var r1 error + if rf, ok := ret.Get(0).(func() (*revdial.Listener, error)); ok { + return rf() + } + if rf, ok := ret.Get(0).(func() *revdial.Listener); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*revdial.Listener) + } + } + + if rf, ok := ret.Get(1).(func() error); ok { + r1 = rf() + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// NewReverser creates a new instance of Reverser. 
It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewReverser(t interface { + mock.TestingT + Cleanup(func()) +}) *Reverser { + mock := &Reverser{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/pkg/api/client/reverser/reverser.go b/pkg/api/client/reverser/reverser.go new file mode 100644 index 00000000000..13724de8abb --- /dev/null +++ b/pkg/api/client/reverser/reverser.go @@ -0,0 +1,13 @@ +package reverser + +import ( + "context" + + "github.com/shellhub-io/shellhub/pkg/revdial" +) + +//go:generate mockery --name=Reverser --filename=reverser.go +type Reverser interface { + Auth(ctx context.Context, token string, connPath string) error + NewListener() (*revdial.Listener, error) +} diff --git a/pkg/api/filter/filter.go b/pkg/api/filter/filter.go deleted file mode 100644 index 081bb99d38d..00000000000 --- a/pkg/api/filter/filter.go +++ /dev/null @@ -1,163 +0,0 @@ -package filter - -import ( - "encoding/base64" - "encoding/json" - - "github.com/pkg/errors" -) - -var ( - ErrFilterTypeUnknow = errors.New("unknow filter type") - ErrPropertyNameInvalid = errors.New("invalid property name") - ErrPropertyOperatorUnknow = errors.New("unknow property operator") -) - -type ( - // Filter holds data for filtering results in a query. - Filter struct { - Type string `json:"type,omitempty"` - Params interface{} `json:"params,omitempty"` - } - - // FilterTypeProperty holds data to filter a property based on value and comparison operator. 
- FilterTypeProperty struct { //nolint:revive - // Property name - Name string `json:"name"` - // Comparison operator - // - // contains: check if property contains the value - // eq: check if the property is equal to value - // bool: check if the property is true or false based on value - // gt: check if the property is greater than value - // lt: check if the property is less than value - Operator string `json:"operator"` - // Value to compare - Value interface{} `json:"value"` - } - - // FilterTypeOperator holds data to apply a conditional operator in a filter. - FilterTypeOperator struct { //nolint:revive - // Conditional operator name - // - // and: AND conditional operator - // or: OR conditional operator - Name string `json:"name"` - } - - // FilterList is a slice of Filter. - FilterList []*Filter //nolint:revive -) - -func (f *Filter) UnmarshalJSON(data []byte) error { - var params json.RawMessage - - type filter Filter - aux := filter{ - Params: ¶ms, - } - - if err := json.Unmarshal(data, &aux); err != nil { - return err - } - - switch aux.Type { - case "property": - var property FilterTypeProperty - if err := json.Unmarshal(params, &property); err != nil { - return err - } - f.Params = &property - case "operator": - var operator FilterTypeOperator - if err := json.Unmarshal(params, &operator); err != nil { - return err - } - f.Params = &operator - } - - f.Type = aux.Type - - return nil -} - -// IsValid check if is valid. -func (f *Filter) IsValid() error { - switch f.Type { - case "property": - if filterType, ok := f.Params.(*FilterTypeProperty); ok { - return filterType.isValid() - } - case "operator": - if filterType, ok := f.Params.(*FilterTypeOperator); ok { - return filterType.isValid() - } - } - - return ErrFilterTypeUnknow -} - -// isValid check if is valid. 
-func (f *FilterTypeProperty) isValid() error { - supportedOperators := map[string]bool{ - "contains": true, - "eq": true, - "bool": true, - "gt": true, - "lt": true, - } - - _, validOperator := supportedOperators[f.Operator] - - if !validOperator { - return errors.Wrap(ErrPropertyOperatorUnknow, f.Operator) - } - - if f.Name == "" { - return ErrPropertyNameInvalid - } - - return nil -} - -// isValid check if is valid. -func (f *FilterTypeOperator) isValid() error { - supportedOperators := map[string]bool{ - "and": true, - "or": true, - } - - if _, ok := supportedOperators[f.Name]; !ok { - return ErrPropertyOperatorUnknow - } - - return nil -} - -// UnmarshalParam converts a base64 filter string to a FilterList. -func (f *FilterList) UnmarshalParam(value string) error { - raw, err := base64.StdEncoding.DecodeString(value) - if err != nil { - return errors.Wrap(err, "filters") - } - - var filter FilterList - if err := json.Unmarshal(raw, &filter); len(raw) > 0 && err != nil { - return err - } - - *f = filter - - return nil -} - -// IsValid check if filter list is valid. 
-func (f *FilterList) IsValid() error { - for _, filter := range *f { - if err := filter.IsValid(); err != nil { - return err - } - } - - return nil -} diff --git a/pkg/api/filter/filter_test.go b/pkg/api/filter/filter_test.go deleted file mode 100644 index c4703ca05a6..00000000000 --- a/pkg/api/filter/filter_test.go +++ /dev/null @@ -1,28 +0,0 @@ -package filter - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestFilterTypeProperty(t *testing.T) { - f := Filter{Type: "property", Params: &FilterTypeProperty{Name: "name", Operator: "contains", Value: "value"}} - assert.NoError(t, f.IsValid()) - f = Filter{Type: "property", Params: &FilterTypeProperty{Name: "name", Operator: "eq", Value: "value"}} - assert.NoError(t, f.IsValid()) - f = Filter{Type: "property", Params: &FilterTypeProperty{Name: "name", Operator: "bool", Value: "value"}} - assert.NoError(t, f.IsValid()) - f = Filter{Type: "property", Params: &FilterTypeProperty{Name: "name", Operator: "gt", Value: "value"}} - assert.NoError(t, f.IsValid()) - f = Filter{Type: "property", Params: &FilterTypeProperty{Name: "name", Operator: "lt", Value: "value"}} - assert.NoError(t, f.IsValid()) -} - -func TestFilterTypeOperator(t *testing.T) { - f := Filter{Type: "operator", Params: &FilterTypeOperator{Name: "and"}} - assert.NoError(t, f.IsValid()) - - f = Filter{Type: "operator", Params: &FilterTypeOperator{Name: "or"}} - assert.NoError(t, f.IsValid()) -} diff --git a/pkg/api/internalclient/billing.go b/pkg/api/internalclient/billing.go new file mode 100644 index 00000000000..0f39864e8f9 --- /dev/null +++ b/pkg/api/internalclient/billing.go @@ -0,0 +1,45 @@ +package internalclient + +import ( + "context" + + "github.com/shellhub-io/shellhub/pkg/models" +) + +// billingAPI defines methods for interacting with billing-related functionality. +type billingAPI interface { + // BillingReport sends a billing report for the specified tenant and action. + // It returns an error, if any. 
+ BillingReport(ctx context.Context, tenant string, action string) error + + // BillingEvaluate evaluates the billing status for the specified tenant. + // It returns the billing evaluation result and an error, if any. + BillingEvaluate(ctx context.Context, tenantID string) (*models.BillingEvaluation, error) +} + +func (c *client) BillingReport(ctx context.Context, tenant string, action string) error { + res, err := c.http. + R(). + SetContext(ctx). + SetHeader("X-Tenant-ID", tenant). + SetQueryParam("action", action). + Post(c.config.EnterpriseBaseURL + "/internal/billing/report") + + return HasError(res, err) +} + +func (c *client) BillingEvaluate(ctx context.Context, tenantID string) (*models.BillingEvaluation, error) { + eval := new(models.BillingEvaluation) + + resp, err := c.http. + R(). + SetContext(ctx). + SetHeader("X-Tenant-ID", tenantID). + SetResult(&eval). + Post(c.config.EnterpriseBaseURL + "/internal/billing/evaluate") + if err := HasError(resp, err); err != nil { + return nil, err + } + + return eval, nil +} diff --git a/pkg/api/internalclient/client.go b/pkg/api/internalclient/client.go index 34ad8b5e42e..83800275e46 100644 --- a/pkg/api/internalclient/client.go +++ b/pkg/api/internalclient/client.go @@ -2,18 +2,33 @@ package internalclient import ( "errors" - "fmt" - "math" "net" "net/http" - "net/url" + "time" resty "github.com/go-resty/resty/v2" - "github.com/hibiken/asynq" - "github.com/shellhub-io/shellhub/pkg/models" - "github.com/sirupsen/logrus" + "github.com/shellhub-io/shellhub/pkg/worker" + log "github.com/sirupsen/logrus" ) +//go:generate mockery --name=Client --filename=internalclient.go +type Client interface { + deviceAPI + namespaceAPI + billingAPI + sessionAPI + sshkeyAPI + firewallAPI +} + +type client struct { + http *resty.Client + logger *log.Logger + worker worker.Client + + config *Config +} + const ( DeviceUIDHeader = "X-Device-UID" ) @@ -21,145 +36,81 @@ const ( var ( ErrConnectionFailed = errors.New("connection failed") 
ErrNotFound = errors.New("not found") + ErrForbidden = errors.New("forbidden") ErrUnknown = errors.New("unknown error") ) -// Options wraps injectable values to a new API internal client. -// NOTE(r): This is a workaround to inject the Asynq client to the API internal client, because the [client] structure -// and its properties are privated. -type Options struct { - Asynq *asynq.Client -} - -type Opt func(*Options) error +func NewClient(cfg *Config, opts ...clientOption) (Client, error) { + if cfg == nil { + var err error -func NewClient(opts ...Opt) Client { - httpClient := resty.New() - httpClient.SetRetryCount(math.MaxInt32) - httpClient.AddRetryCondition(func(r *resty.Response, err error) bool { - if _, ok := err.(net.Error); ok { // if the error is a network error, retry. - return true + cfg, err = NewConfigFromEnv() + if err != nil { + return nil, err } + } - return r.StatusCode() >= http.StatusInternalServerError && r.StatusCode() != http.StatusNotImplemented - }) + httpClient := resty.New() c := &client{ - host: apiHost, - port: apiPort, - scheme: apiScheme, http: httpClient, + config: cfg, } - o := new(Options) for _, opt := range opts { - if err := opt(o); err != nil { - return nil + if err := opt(c); err != nil { + return nil, err } } - if o.Asynq != nil { - c.asynq = o.Asynq - } - if c.logger != nil { httpClient.SetLogger(&LeveledLogger{c.logger}) } - return c -} - -// NewClientWithAsynq creates a new API internal client with the Asynq client injected to turn the API internal client -// able to enqueue ping tasks to the Asynq server and late process by API server. -// -// It uses the [NewClient] function to create a new API internal client and injects the Asynq client to it through the -// [Options] structure. 
-func NewClientWithAsynq(uri string) Client { - return NewClient(func(o *Options) error { - // The internal client used by the SSH server needs to be able to enqueue tasks to the Asynq server, due that, - // we must set the Asynq client to the internal client as a configuration function. - options, err := asynq.ParseRedisURI(uri) - if err != nil { - return err - } - - client := asynq.NewClient(options) - if client == nil { - return errors.New("failed to create Asynq client") + // NOTE: Avoid setting a global base URL on the Resty client. Calls to enterprise endpoints + // will use c.config.EnterpriseBaseURL explicitly when needed. + httpClient.SetRetryCount(c.config.RetryCount) + httpClient.SetRetryWaitTime(time.Duration(c.config.RetryWaitTime) * time.Second) + httpClient.SetRetryMaxWaitTime(time.Duration(c.config.RetryMaxWaitTime) * time.Second) + httpClient.AddRetryCondition(func(r *resty.Response, err error) bool { + if _, ok := err.(net.Error); ok { // if the error is a network error, retry. + return true } - o.Asynq = client - - return nil + return r.StatusCode() >= http.StatusInternalServerError && r.StatusCode() != http.StatusNotImplemented }) -} -type commonAPI interface { - ListDevices() ([]models.Device, error) - GetDevice(uid string) (*models.Device, error) - GetDeviceByPublicURLAddress(address string) (*models.Device, error) -} + httpClient.OnBeforeRequest(func(c *resty.Client, r *resty.Request) error { + // NOTE: Add a unique request ID to each request for better traceability. + r.Header.Set("X-Request-Id", randomString(32)) -type client struct { - scheme string - host string - port int - http *resty.Client - logger *logrus.Logger - asynq *asynq.Client -} + log.WithFields(log.Fields{ + "id": r.Header.Get("X-Request-Id"), + "attempt": r.Attempt, + "method": r.Method, + "url": r.URL, + }).Info("internal client request send") -func (c *client) ListDevices() ([]models.Device, error) { - list := []models.Device{} - _, err := c.http.R(). - SetResult(list). 
- Get(buildURL(c, "/api/devices")) + return nil + }) - return list, err -} + httpClient.OnAfterResponse(func(c *resty.Client, r *resty.Response) error { + log.WithFields(log.Fields{ + "id": r.Header().Get("X-Request-Id"), + "attempt": r.Request.Attempt, + "method": r.Request.Method, + "url": r.Request.URL, + "status": r.StatusCode(), + }).Info("internal client response received") -func (c *client) GetDevice(uid string) (*models.Device, error) { - var device *models.Device - resp, err := c.http.R(). - SetResult(&device). - Get(buildURL(c, fmt.Sprintf("/api/devices/%s", uid))) - if err != nil { - return nil, ErrConnectionFailed - } + return nil + }) - switch resp.StatusCode() { - case 400: - return nil, ErrNotFound - case 200: - return device, nil - default: - return nil, ErrUnknown - } + return c, nil } -func (c *client) GetDeviceByPublicURLAddress(address string) (*models.Device, error) { - httpClient := resty.New() - - var device *models.Device - resp, err := httpClient.R(). - SetResult(&device). 
- Get(buildURL(c, fmt.Sprintf("/internal/devices/public/%s", address))) - if err != nil { - return nil, ErrConnectionFailed - } - - switch resp.StatusCode() { - case 404: - return nil, ErrNotFound - case 200: - return device, nil - default: - return nil, ErrUnknown +func (c *client) mustWorker() { + if c.worker == nil { + panic("Client does not have any worker") } } - -func buildURL(c *client, uri string) string { - u, _ := url.Parse(fmt.Sprintf("%s://%s:%d%s", c.scheme, c.host, c.port, uri)) - - return u.String() -} diff --git a/pkg/api/internalclient/client_internal.go b/pkg/api/internalclient/client_internal.go deleted file mode 100644 index 033d96146d2..00000000000 --- a/pkg/api/internalclient/client_internal.go +++ /dev/null @@ -1,241 +0,0 @@ -package internalclient - -import ( - "errors" - "fmt" - "net" - "net/http" - - "github.com/go-resty/resty/v2" - "github.com/hibiken/asynq" - "github.com/shellhub-io/shellhub/pkg/models" -) - -const ( - apiHost = "api" - apiPort = 8080 - apiScheme = "http" - billingURL = "billing-api" -) - -type Client interface { - commonAPI - internalAPI -} - -type internalAPI interface { - LookupDevice() - GetPublicKey(fingerprint, tenant string) (*models.PublicKey, error) - CreatePrivateKey() (*models.PrivateKey, error) - EvaluateKey(fingerprint string, dev *models.Device, username string) (bool, error) - DevicesOffline(id string) error - DevicesHeartbeat(id string) error - FirewallEvaluate(lookup map[string]string) error - SessionAsAuthenticated(uid string) []error - FinishSession(uid string) []error - KeepAliveSession(uid string) []error - RecordSession(session *models.SessionRecorded, recordURL string) - Lookup(lookup map[string]string) (string, []error) - DeviceLookup(lookup map[string]string) (*models.Device, []error) - BillingReport(tenant string, action string) (int, error) - BillingEvaluate(tenantID string) (*models.BillingEvaluation, int, error) -} - -func (c *client) LookupDevice() { -} - -func (c *client) 
BillingReport(tenant string, action string) (int, error) { - res, err := c.http.R(). - SetHeader("X-Tenant-ID", tenant). - SetQueryParam("action", action). - Post(fmt.Sprintf("%s://%s:%d/internal/billing/report", apiScheme, billingURL, apiPort)) - if err != nil { - return http.StatusInternalServerError, err - } - - return res.StatusCode(), nil -} - -func (c *client) BillingEvaluate(tenantID string) (*models.BillingEvaluation, int, error) { - var evaluation *models.BillingEvaluation - resp, err := c.http.R(). - SetHeader("X-Tenant-ID", tenantID). - SetResult(&evaluation). - Post(fmt.Sprintf("%s://%s:%d/internal/billing/evaluate", apiScheme, billingURL, apiPort)) - if err != nil { - return evaluation, resp.StatusCode(), err - } - - return evaluation, resp.StatusCode(), nil -} - -func (c *client) GetPublicKey(fingerprint, tenant string) (*models.PublicKey, error) { - var pubKey *models.PublicKey - resp, err := c.http.R(). - SetResult(&pubKey). - Get(buildURL(c, fmt.Sprintf("/internal/sshkeys/public-keys/%s/%s", fingerprint, tenant))) - if err != nil { - return nil, err - } - - if resp.StatusCode() == 404 { - return nil, ErrNotFound - } - - return pubKey, nil -} - -func (c *client) EvaluateKey(fingerprint string, dev *models.Device, username string) (bool, error) { - var evaluate *bool - - resp, err := c.http.R(). - SetBody(dev). - SetResult(&evaluate). - Post(buildURL(c, fmt.Sprintf("/internal/sshkeys/public-keys/evaluate/%s/%s", fingerprint, username))) - if err != nil { - return false, err - } - - if resp.StatusCode() == 200 { - return *evaluate, nil - } - - return false, nil -} - -func (c *client) CreatePrivateKey() (*models.PrivateKey, error) { - var privKey *models.PrivateKey - _, err := c.http.R(). - SetResult(&privKey). - Post(buildURL(c, "/internal/sshkeys/private-keys")) - if err != nil { - return nil, err - } - - return privKey, nil -} - -func (c *client) DevicesOffline(id string) error { - _, err := c.http.R(). 
- Post(buildURL(c, fmt.Sprintf("/internal/devices/%s/offline", id))) - if err != nil { - return err - } - - return nil -} - -func (c *client) DevicesHeartbeat(id string) error { - _, err := c.asynq.Enqueue(asynq.NewTask("api:heartbeat", []byte(id)), asynq.Queue("api"), asynq.Group("heartbeats")) - - return err -} - -var ( - ErrFirewallConnection = errors.New("failed to make the request to evaluate the firewall") - ErrFirewallBlock = errors.New("a firewall rule prohibit this connection") -) - -func (c *client) FirewallEvaluate(lookup map[string]string) error { - local := resty.New() - local.AddRetryCondition(func(r *resty.Response, err error) bool { - if _, ok := err.(net.Error); ok { - return true - } - - return r.StatusCode() >= http.StatusInternalServerError && r.StatusCode() != http.StatusNotImplemented - }) - - resp, err := local. - SetRetryCount(10). - R(). - SetQueryParams(lookup). - Get("http://cloud-api:8080/internal/firewall/rules/evaluate") - if err != nil { - return ErrFirewallConnection - } - - if resp.StatusCode() != http.StatusOK { - return ErrFirewallBlock - } - - return nil -} - -// SessionAsAuthenticated makes a HTTP request to ShellHub API server to mark the session as authenticated. -func (c *client) SessionAsAuthenticated(uid string) []error { - var errors []error - _, err := c.http.R(). - SetBody(&models.Status{ - Authenticated: true, - }). - Patch(buildURL(c, fmt.Sprintf("/internal/sessions/"+uid))) - if err != nil { - errors = append(errors, err) - } - - return errors -} - -func (c *client) FinishSession(uid string) []error { - var errors []error - _, err := c.http.R(). - Post(buildURL(c, fmt.Sprintf("/internal/sessions/%s/finish", uid))) - if err != nil { - errors = append(errors, err) - } - - return errors -} - -func (c *client) KeepAliveSession(uid string) []error { - var errors []error - _, err := c.http.R(). 
- Post(buildURL(c, fmt.Sprintf("/internal/sessions/%s/keepalive", uid))) - if err != nil { - errors = append(errors, err) - } - - return errors -} - -func (c *client) RecordSession(session *models.SessionRecorded, recordURL string) { - _, _ = c.http.R(). - SetBody(session). - Post(fmt.Sprintf("http://"+recordURL+"/internal/sessions/%s/record", session.UID)) -} - -func (c *client) Lookup(lookup map[string]string) (string, []error) { - var device struct { - UID string `json:"uid"` - } - - resp, _ := c.http.R(). - SetQueryParams(lookup). - SetResult(&device). - Get(buildURL(c, "/internal/lookup")) - - if resp.StatusCode() != http.StatusOK { - return "", []error{errors.New("lookup failed")} - } - - return device.UID, nil -} - -func (c *client) DeviceLookup(lookup map[string]string) (*models.Device, []error) { - var device *models.Device - - resp, err := c.http.R(). - SetQueryParams(lookup). - SetResult(&device). - Get(buildURL(c, "/internal/lookup")) - if err != nil { - return nil, []error{err} - } - - if resp.StatusCode() != http.StatusOK { - return nil, []error{err} - } - - return device, nil -} diff --git a/pkg/api/internalclient/config.go b/pkg/api/internalclient/config.go new file mode 100644 index 00000000000..1c5fa3a3929 --- /dev/null +++ b/pkg/api/internalclient/config.go @@ -0,0 +1,41 @@ +package internalclient + +import ( + "github.com/shellhub-io/shellhub/pkg/envs" +) + +// Config holds configuration options for the client. +type Config struct { + // RetryCount defines how many times the client should retry a request in case of failure. + RetryCount int `env:"SHELLHUB_INTERNAL_HTTP_CLIENT_RETRY_COUNT,default=3"` + // RetryWaitTime defines the wait time between retries. + RetryWaitTime int `env:"SHELLHUB_INTERNAL_HTTP_CLIENT_RETRY_WAIT_TIME,default=5"` + // RetryMaxWaitTime defines the maximum wait time between retries. 
+ RetryMaxWaitTime int `env:"SHELLHUB_INTERNAL_HTTP_CLIENT_RETRY_MAX_WAIT_TIME,default=20"` + + // APIBaseURL defines the base URL for the API service. + APIBaseURL string `env:"SHELLHUB_INTERNAL_HTTP_CLIENT_API_BASE_URL,default=http://api:8080"` + + // EnterpriseBaseURL defines the base URL for enterprise endpoints (cloud component). + EnterpriseBaseURL string `env:"SHELLHUB_INTERNAL_HTTP_CLIENT_ENTERPRISE_BASE_URL,default=http://cloud:8080"` +} + +func NewConfigFromEnv() (*Config, error) { + env, err := envs.Parse[Config]() + if err != nil { + return nil, err + } + + return env, nil +} + +// DefaultConfig returns a Config struct with default values. +func DefaultConfig() (*Config, error) { + return &Config{ + RetryCount: 3, + RetryWaitTime: 5, + RetryMaxWaitTime: 20, + APIBaseURL: "http://api:8080", + EnterpriseBaseURL: "http://cloud:8080", + }, nil +} diff --git a/pkg/api/internalclient/device.go b/pkg/api/internalclient/device.go new file mode 100644 index 00000000000..e3d186b47f8 --- /dev/null +++ b/pkg/api/internalclient/device.go @@ -0,0 +1,145 @@ +package internalclient + +import ( + "context" + "time" + + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/shellhub-io/shellhub/pkg/worker" +) + +type deviceAPI interface { + // ListDevices returns a list of devices. + ListDevices(ctx context.Context) ([]models.Device, error) + + // GetDevice retrieves device information for the specified UID. + GetDevice(ctx context.Context, uid string) (*models.Device, error) + + // DevicesOffline updates a device's status to offline. + DevicesOffline(ctx context.Context, uid string) error + + // DevicesHeartbeat enqueues a task to send a heartbeat for the device. + DevicesHeartbeat(ctx context.Context, uid string) error + + // Lookup performs a lookup operation based on the provided parameters. + Lookup(ctx context.Context, lookup map[string]string) (string, error) + + // DeviceLookup performs a lookup operation based on the provided parameters. 
+ DeviceLookup(ctx context.Context, tenantID, name string) (*models.Device, error) + + // LookupWebEndpoints retrieves a web endpoint by its address. + LookupWebEndpoints(ctx context.Context, address string) (*WebEndpoint, error) +} + +func (c *client) DevicesOffline(ctx context.Context, uid string) error { + res, err := c.http. + R(). + SetContext(ctx). + SetPathParam("uid", uid). + Post(c.config.APIBaseURL + "/internal/devices/{uid}/offline") + + return HasError(res, err) +} + +func (c *client) DevicesHeartbeat(ctx context.Context, uid string) error { + return c.worker.SubmitToBatch(ctx, worker.TaskPattern("api:heartbeat"), []byte(uid)) +} + +func (c *client) Lookup(ctx context.Context, lookup map[string]string) (string, error) { + var device struct { + UID string `json:"uid"` + } + + resp, err := c.http. + R(). + SetContext(ctx). + SetQueryParams(lookup). + SetResult(&device). + Get(c.config.APIBaseURL + "/internal/lookup") + if err := HasError(resp, err); err != nil { + return "", err + } + + return device.UID, nil +} + +func (c *client) DeviceLookup(ctx context.Context, tenantID, name string) (*models.Device, error) { + device := new(models.Device) + + resp, err := c.http. + R(). + SetContext(ctx). + SetQueryParam("tenant_id", tenantID). + SetQueryParam("name", name). + SetResult(&device). + Get(c.config.APIBaseURL + "/internal/device/lookup") + if err := HasError(resp, err); err != nil { + return nil, err + } + + return device, nil +} + +func (c *client) ListDevices(ctx context.Context) ([]models.Device, error) { + list := []models.Device{} + + resp, err := c.http. + R(). + SetContext(ctx). + SetResult(&list). + Get(c.config.APIBaseURL + "/api/devices") + if err := HasError(resp, err); err != nil { + return nil, err + } + + return list, nil +} + +func (c *client) GetDevice(ctx context.Context, uid string) (*models.Device, error) { + device := new(models.Device) + resp, err := c.http. + R(). + SetContext(ctx). + SetPathParam("uid", uid). + SetResult(&device). 
+ Get(c.config.APIBaseURL + "/api/devices/{uid}") + if err := HasError(resp, err); err != nil { + return nil, err + } + + return device, nil +} + +type WebEndpointTLS struct { + Enabled bool `json:"enabled"` + Verify bool `json:"verify"` + Domain string `json:"domain"` +} + +type WebEndpoint struct { + Address string `json:"address"` + Namespace string `json:"namespace"` + DeviceUID string `json:"device_uid"` + Device *models.Device `json:"device"` + Host string `json:"host"` + Port int `json:"port"` + TimeToLive int `json:"ttl"` + TLS WebEndpointTLS `json:"tls"` + ExpiresIn time.Time `json:"expires_in"` + CreatedAt time.Time `json:"time" bson:"time"` +} + +func (c *client) LookupWebEndpoints(ctx context.Context, address string) (*WebEndpoint, error) { + var endpoint *WebEndpoint + resp, err := c.http. + R(). + SetContext(ctx). + SetPathParam("address", address). + SetResult(&endpoint). + Get(c.config.EnterpriseBaseURL + "/internal/web-endpoints/{address}") + if err := HasError(resp, err); err != nil { + return nil, err + } + + return endpoint, nil +} diff --git a/pkg/api/internalclient/errors.go b/pkg/api/internalclient/errors.go new file mode 100644 index 00000000000..dd9197b9711 --- /dev/null +++ b/pkg/api/internalclient/errors.go @@ -0,0 +1,36 @@ +package internalclient + +import ( + "errors" + "fmt" + + resty "github.com/go-resty/resty/v2" +) + +// ErrRequestFailed is returned when an HTTP request fails to be executed. +var ErrRequestFailed = errors.New("request failed") + +// Error represents a custom error structure that includes an HTTP status code and a message. +type Error struct { + Code int `json:"code"` + Message string `json:"message"` +} + +func (e *Error) Error() string { + return fmt.Sprintf("code=%d, message=%s", e.Code, e.Message) +} + +// HasError checks if there was an error in the HTTP response or if the provided error is not nil. 
+func HasError(resp *resty.Response, err error) error {
+	if err != nil {
+		return errors.Join(ErrRequestFailed, err)
+	}
+
+	if resp.IsError() {
+		// NOTE: If we got an error, status code greater than 399, we create our custom error to load it with the response
+		// data.
+		return &Error{Code: resp.StatusCode(), Message: resp.Status()}
+	}
+
+	return nil
+}
diff --git a/pkg/api/internalclient/firewall.go b/pkg/api/internalclient/firewall.go
new file mode 100644
index 00000000000..38e426ce921
--- /dev/null
+++ b/pkg/api/internalclient/firewall.go
@@ -0,0 +1,22 @@
+package internalclient
+
+import (
+	"context"
+)
+
+// firewallAPI defines methods for interacting with firewall-related functionality.
+type firewallAPI interface {
+	// FirewallEvaluate evaluates firewall rules based on the provided lookup parameters.
+	// It returns an error if the evaluation fails or if a firewall rule prohibits the connection.
+	FirewallEvaluate(ctx context.Context, lookup map[string]string) error
+}
+
+func (c *client) FirewallEvaluate(ctx context.Context, lookup map[string]string) error {
+	resp, err := c.http.
+		R().
+		SetContext(ctx).
+		SetQueryParams(lookup).
+ Get(c.config.EnterpriseBaseURL + "/internal/firewall/rules/evaluate") + + return HasError(resp, err) +} diff --git a/pkg/api/internalclient/logger.go b/pkg/api/internalclient/logger.go index 95c31ccf819..ff491c6adde 100644 --- a/pkg/api/internalclient/logger.go +++ b/pkg/api/internalclient/logger.go @@ -8,20 +8,20 @@ type LeveledLogger struct { Logger *logrus.Logger } -func (l *LeveledLogger) Errorf(msg string, keysAndValues ...interface{}) { +func (l *LeveledLogger) Errorf(msg string, keysAndValues ...any) { l.Logger.WithFields(toFields(keysAndValues)).Error(msg) } -func (l *LeveledLogger) Debugf(msg string, keysAndValues ...interface{}) { +func (l *LeveledLogger) Debugf(msg string, keysAndValues ...any) { l.Logger.WithFields(toFields(keysAndValues)).Debug(msg) } -func (l *LeveledLogger) Warnf(msg string, keysAndValues ...interface{}) { +func (l *LeveledLogger) Warnf(msg string, keysAndValues ...any) { l.Logger.WithFields(toFields(keysAndValues)).Warn(msg) } -func toFields(keysAndValues []interface{}) logrus.Fields { - fields := make(map[string]interface{}) +func toFields(keysAndValues []any) logrus.Fields { + fields := make(map[string]any) for i := 0; i < len(keysAndValues); i += 2 { fields[keysAndValues[i].(string)] = keysAndValues[i+1] diff --git a/pkg/api/internalclient/mocks/internalclient.go b/pkg/api/internalclient/mocks/internalclient.go index c6f0eda0d96..1662d2c3b6c 100644 --- a/pkg/api/internalclient/mocks/internalclient.go +++ b/pkg/api/internalclient/mocks/internalclient.go @@ -1,10 +1,18 @@ -// Code generated by mockery v2.20.0. DO NOT EDIT. +// Code generated by mockery v2.53.3. DO NOT EDIT. 
package mocks import ( - models "github.com/shellhub-io/shellhub/pkg/models" + context "context" + + internalclient "github.com/shellhub-io/shellhub/pkg/api/internalclient" mock "github.com/stretchr/testify/mock" + + models "github.com/shellhub-io/shellhub/pkg/models" + + requests "github.com/shellhub-io/shellhub/pkg/api/requests" + + websocket "github.com/gorilla/websocket" ) // Client is an autogenerated mock type for the Client type @@ -12,82 +20,77 @@ type Client struct { mock.Mock } -// BillingEvaluate provides a mock function with given fields: tenantID -func (_m *Client) BillingEvaluate(tenantID string) (*models.BillingEvaluation, int, error) { - ret := _m.Called(tenantID) +// BillingEvaluate provides a mock function with given fields: ctx, tenantID +func (_m *Client) BillingEvaluate(ctx context.Context, tenantID string) (*models.BillingEvaluation, error) { + ret := _m.Called(ctx, tenantID) + + if len(ret) == 0 { + panic("no return value specified for BillingEvaluate") + } var r0 *models.BillingEvaluation - var r1 int - var r2 error - if rf, ok := ret.Get(0).(func(string) (*models.BillingEvaluation, int, error)); ok { - return rf(tenantID) + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string) (*models.BillingEvaluation, error)); ok { + return rf(ctx, tenantID) } - if rf, ok := ret.Get(0).(func(string) *models.BillingEvaluation); ok { - r0 = rf(tenantID) + if rf, ok := ret.Get(0).(func(context.Context, string) *models.BillingEvaluation); ok { + r0 = rf(ctx, tenantID) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*models.BillingEvaluation) } } - if rf, ok := ret.Get(1).(func(string) int); ok { - r1 = rf(tenantID) - } else { - r1 = ret.Get(1).(int) - } - - if rf, ok := ret.Get(2).(func(string) error); ok { - r2 = rf(tenantID) + if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { + r1 = rf(ctx, tenantID) } else { - r2 = ret.Error(2) + r1 = ret.Error(1) } - return r0, r1, r2 + return r0, r1 } -// BillingReport provides a mock 
function with given fields: tenant, action -func (_m *Client) BillingReport(tenant string, action string) (int, error) { - ret := _m.Called(tenant, action) +// BillingReport provides a mock function with given fields: ctx, tenant, action +func (_m *Client) BillingReport(ctx context.Context, tenant string, action string) error { + ret := _m.Called(ctx, tenant, action) - var r0 int - var r1 error - if rf, ok := ret.Get(0).(func(string, string) (int, error)); ok { - return rf(tenant, action) - } - if rf, ok := ret.Get(0).(func(string, string) int); ok { - r0 = rf(tenant, action) - } else { - r0 = ret.Get(0).(int) + if len(ret) == 0 { + panic("no return value specified for BillingReport") } - if rf, ok := ret.Get(1).(func(string, string) error); ok { - r1 = rf(tenant, action) + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, string, string) error); ok { + r0 = rf(ctx, tenant, action) } else { - r1 = ret.Error(1) + r0 = ret.Error(0) } - return r0, r1 + return r0 } -// CreatePrivateKey provides a mock function with given fields: -func (_m *Client) CreatePrivateKey() (*models.PrivateKey, error) { - ret := _m.Called() +// CreatePrivateKey provides a mock function with given fields: ctx +func (_m *Client) CreatePrivateKey(ctx context.Context) (*models.PrivateKey, error) { + ret := _m.Called(ctx) + + if len(ret) == 0 { + panic("no return value specified for CreatePrivateKey") + } var r0 *models.PrivateKey var r1 error - if rf, ok := ret.Get(0).(func() (*models.PrivateKey, error)); ok { - return rf() + if rf, ok := ret.Get(0).(func(context.Context) (*models.PrivateKey, error)); ok { + return rf(ctx) } - if rf, ok := ret.Get(0).(func() *models.PrivateKey); ok { - r0 = rf() + if rf, ok := ret.Get(0).(func(context.Context) *models.PrivateKey); ok { + r0 = rf(ctx) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*models.PrivateKey) } } - if rf, ok := ret.Get(1).(func() error); ok { - r1 = rf() + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = 
rf(ctx) } else { r1 = ret.Error(1) } @@ -95,41 +98,47 @@ func (_m *Client) CreatePrivateKey() (*models.PrivateKey, error) { return r0, r1 } -// DeviceLookup provides a mock function with given fields: lookup -func (_m *Client) DeviceLookup(lookup map[string]string) (*models.Device, []error) { - ret := _m.Called(lookup) +// DeviceLookup provides a mock function with given fields: ctx, tenantID, name +func (_m *Client) DeviceLookup(ctx context.Context, tenantID string, name string) (*models.Device, error) { + ret := _m.Called(ctx, tenantID, name) + + if len(ret) == 0 { + panic("no return value specified for DeviceLookup") + } var r0 *models.Device - var r1 []error - if rf, ok := ret.Get(0).(func(map[string]string) (*models.Device, []error)); ok { - return rf(lookup) + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string, string) (*models.Device, error)); ok { + return rf(ctx, tenantID, name) } - if rf, ok := ret.Get(0).(func(map[string]string) *models.Device); ok { - r0 = rf(lookup) + if rf, ok := ret.Get(0).(func(context.Context, string, string) *models.Device); ok { + r0 = rf(ctx, tenantID, name) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*models.Device) } } - if rf, ok := ret.Get(1).(func(map[string]string) []error); ok { - r1 = rf(lookup) + if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok { + r1 = rf(ctx, tenantID, name) } else { - if ret.Get(1) != nil { - r1 = ret.Get(1).([]error) - } + r1 = ret.Error(1) } return r0, r1 } -// DevicesHeartbeat provides a mock function with given fields: id -func (_m *Client) DevicesHeartbeat(id string) error { - ret := _m.Called(id) +// DevicesHeartbeat provides a mock function with given fields: ctx, uid +func (_m *Client) DevicesHeartbeat(ctx context.Context, uid string) error { + ret := _m.Called(ctx, uid) + + if len(ret) == 0 { + panic("no return value specified for DevicesHeartbeat") + } var r0 error - if rf, ok := ret.Get(0).(func(string) error); ok { - r0 = rf(id) + if rf, ok 
:= ret.Get(0).(func(context.Context, string) error); ok { + r0 = rf(ctx, uid) } else { r0 = ret.Error(0) } @@ -137,13 +146,17 @@ func (_m *Client) DevicesHeartbeat(id string) error { return r0 } -// DevicesOffline provides a mock function with given fields: id -func (_m *Client) DevicesOffline(id string) error { - ret := _m.Called(id) +// DevicesOffline provides a mock function with given fields: ctx, uid +func (_m *Client) DevicesOffline(ctx context.Context, uid string) error { + ret := _m.Called(ctx, uid) + + if len(ret) == 0 { + panic("no return value specified for DevicesOffline") + } var r0 error - if rf, ok := ret.Get(0).(func(string) error); ok { - r0 = rf(id) + if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { + r0 = rf(ctx, uid) } else { r0 = ret.Error(0) } @@ -151,23 +164,27 @@ func (_m *Client) DevicesOffline(id string) error { return r0 } -// EvaluateKey provides a mock function with given fields: fingerprint, dev, username -func (_m *Client) EvaluateKey(fingerprint string, dev *models.Device, username string) (bool, error) { - ret := _m.Called(fingerprint, dev, username) +// EvaluateKey provides a mock function with given fields: ctx, fingerprint, dev, username +func (_m *Client) EvaluateKey(ctx context.Context, fingerprint string, dev *models.Device, username string) (bool, error) { + ret := _m.Called(ctx, fingerprint, dev, username) + + if len(ret) == 0 { + panic("no return value specified for EvaluateKey") + } var r0 bool var r1 error - if rf, ok := ret.Get(0).(func(string, *models.Device, string) (bool, error)); ok { - return rf(fingerprint, dev, username) + if rf, ok := ret.Get(0).(func(context.Context, string, *models.Device, string) (bool, error)); ok { + return rf(ctx, fingerprint, dev, username) } - if rf, ok := ret.Get(0).(func(string, *models.Device, string) bool); ok { - r0 = rf(fingerprint, dev, username) + if rf, ok := ret.Get(0).(func(context.Context, string, *models.Device, string) bool); ok { + r0 = rf(ctx, 
fingerprint, dev, username) } else { r0 = ret.Get(0).(bool) } - if rf, ok := ret.Get(1).(func(string, *models.Device, string) error); ok { - r1 = rf(fingerprint, dev, username) + if rf, ok := ret.Get(1).(func(context.Context, string, *models.Device, string) error); ok { + r1 = rf(ctx, fingerprint, dev, username) } else { r1 = ret.Error(1) } @@ -175,29 +192,47 @@ func (_m *Client) EvaluateKey(fingerprint string, dev *models.Device, username s return r0, r1 } -// FinishSession provides a mock function with given fields: uid -func (_m *Client) FinishSession(uid string) []error { - ret := _m.Called(uid) +// EventSessionStream provides a mock function with given fields: ctx, uid +func (_m *Client) EventSessionStream(ctx context.Context, uid string) (*websocket.Conn, error) { + ret := _m.Called(ctx, uid) - var r0 []error - if rf, ok := ret.Get(0).(func(string) []error); ok { - r0 = rf(uid) + if len(ret) == 0 { + panic("no return value specified for EventSessionStream") + } + + var r0 *websocket.Conn + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string) (*websocket.Conn, error)); ok { + return rf(ctx, uid) + } + if rf, ok := ret.Get(0).(func(context.Context, string) *websocket.Conn); ok { + r0 = rf(ctx, uid) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).([]error) + r0 = ret.Get(0).(*websocket.Conn) } } - return r0 + if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { + r1 = rf(ctx, uid) + } else { + r1 = ret.Error(1) + } + + return r0, r1 } -// FirewallEvaluate provides a mock function with given fields: lookup -func (_m *Client) FirewallEvaluate(lookup map[string]string) error { - ret := _m.Called(lookup) +// FinishSession provides a mock function with given fields: ctx, uid +func (_m *Client) FinishSession(ctx context.Context, uid string) error { + ret := _m.Called(ctx, uid) + + if len(ret) == 0 { + panic("no return value specified for FinishSession") + } var r0 error - if rf, ok := ret.Get(0).(func(map[string]string) error); ok { - 
r0 = rf(lookup) + if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { + r0 = rf(ctx, uid) } else { r0 = ret.Error(0) } @@ -205,51 +240,47 @@ func (_m *Client) FirewallEvaluate(lookup map[string]string) error { return r0 } -// GetDevice provides a mock function with given fields: uid -func (_m *Client) GetDevice(uid string) (*models.Device, error) { - ret := _m.Called(uid) +// FirewallEvaluate provides a mock function with given fields: ctx, lookup +func (_m *Client) FirewallEvaluate(ctx context.Context, lookup map[string]string) error { + ret := _m.Called(ctx, lookup) - var r0 *models.Device - var r1 error - if rf, ok := ret.Get(0).(func(string) (*models.Device, error)); ok { - return rf(uid) - } - if rf, ok := ret.Get(0).(func(string) *models.Device); ok { - r0 = rf(uid) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.Device) - } + if len(ret) == 0 { + panic("no return value specified for FirewallEvaluate") } - if rf, ok := ret.Get(1).(func(string) error); ok { - r1 = rf(uid) + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, map[string]string) error); ok { + r0 = rf(ctx, lookup) } else { - r1 = ret.Error(1) + r0 = ret.Error(0) } - return r0, r1 + return r0 } -// GetDeviceByPublicURLAddress provides a mock function with given fields: address -func (_m *Client) GetDeviceByPublicURLAddress(address string) (*models.Device, error) { - ret := _m.Called(address) +// GetDevice provides a mock function with given fields: ctx, uid +func (_m *Client) GetDevice(ctx context.Context, uid string) (*models.Device, error) { + ret := _m.Called(ctx, uid) + + if len(ret) == 0 { + panic("no return value specified for GetDevice") + } var r0 *models.Device var r1 error - if rf, ok := ret.Get(0).(func(string) (*models.Device, error)); ok { - return rf(address) + if rf, ok := ret.Get(0).(func(context.Context, string) (*models.Device, error)); ok { + return rf(ctx, uid) } - if rf, ok := ret.Get(0).(func(string) *models.Device); ok { - r0 = 
rf(address) + if rf, ok := ret.Get(0).(func(context.Context, string) *models.Device); ok { + r0 = rf(ctx, uid) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*models.Device) } } - if rf, ok := ret.Get(1).(func(string) error); ok { - r1 = rf(address) + if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { + r1 = rf(ctx, uid) } else { r1 = ret.Error(1) } @@ -257,25 +288,29 @@ func (_m *Client) GetDeviceByPublicURLAddress(address string) (*models.Device, e return r0, r1 } -// GetPublicKey provides a mock function with given fields: fingerprint, tenant -func (_m *Client) GetPublicKey(fingerprint string, tenant string) (*models.PublicKey, error) { - ret := _m.Called(fingerprint, tenant) +// GetPublicKey provides a mock function with given fields: ctx, fingerprint, tenant +func (_m *Client) GetPublicKey(ctx context.Context, fingerprint string, tenant string) (*models.PublicKey, error) { + ret := _m.Called(ctx, fingerprint, tenant) + + if len(ret) == 0 { + panic("no return value specified for GetPublicKey") + } var r0 *models.PublicKey var r1 error - if rf, ok := ret.Get(0).(func(string, string) (*models.PublicKey, error)); ok { - return rf(fingerprint, tenant) + if rf, ok := ret.Get(0).(func(context.Context, string, string) (*models.PublicKey, error)); ok { + return rf(ctx, fingerprint, tenant) } - if rf, ok := ret.Get(0).(func(string, string) *models.PublicKey); ok { - r0 = rf(fingerprint, tenant) + if rf, ok := ret.Get(0).(func(context.Context, string, string) *models.PublicKey); ok { + r0 = rf(ctx, fingerprint, tenant) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*models.PublicKey) } } - if rf, ok := ret.Get(1).(func(string, string) error); ok { - r1 = rf(fingerprint, tenant) + if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok { + r1 = rf(ctx, fingerprint, tenant) } else { r1 = ret.Error(1) } @@ -283,41 +318,65 @@ func (_m *Client) GetPublicKey(fingerprint string, tenant string) (*models.Publi return r0, r1 } -// 
KeepAliveSession provides a mock function with given fields: uid -func (_m *Client) KeepAliveSession(uid string) []error { - ret := _m.Called(uid) +// InviteMember provides a mock function with given fields: ctx, tenantID, userID, forwardedHost +func (_m *Client) InviteMember(ctx context.Context, tenantID string, userID string, forwardedHost string) error { + ret := _m.Called(ctx, tenantID, userID, forwardedHost) + + if len(ret) == 0 { + panic("no return value specified for InviteMember") + } - var r0 []error - if rf, ok := ret.Get(0).(func(string) []error); ok { - r0 = rf(uid) + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, string, string, string) error); ok { + r0 = rf(ctx, tenantID, userID, forwardedHost) } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]error) - } + r0 = ret.Error(0) + } + + return r0 +} + +// KeepAliveSession provides a mock function with given fields: ctx, uid +func (_m *Client) KeepAliveSession(ctx context.Context, uid string) error { + ret := _m.Called(ctx, uid) + + if len(ret) == 0 { + panic("no return value specified for KeepAliveSession") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { + r0 = rf(ctx, uid) + } else { + r0 = ret.Error(0) } return r0 } -// ListDevices provides a mock function with given fields: -func (_m *Client) ListDevices() ([]models.Device, error) { - ret := _m.Called() +// ListDevices provides a mock function with given fields: ctx +func (_m *Client) ListDevices(ctx context.Context) ([]models.Device, error) { + ret := _m.Called(ctx) + + if len(ret) == 0 { + panic("no return value specified for ListDevices") + } var r0 []models.Device var r1 error - if rf, ok := ret.Get(0).(func() ([]models.Device, error)); ok { - return rf() + if rf, ok := ret.Get(0).(func(context.Context) ([]models.Device, error)); ok { + return rf(ctx) } - if rf, ok := ret.Get(0).(func() []models.Device); ok { - r0 = rf() + if rf, ok := ret.Get(0).(func(context.Context) []models.Device); 
ok { + r0 = rf(ctx) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]models.Device) } } - if rf, ok := ret.Get(1).(func() error); ok { - r1 = rf() + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(ctx) } else { r1 = ret.Error(1) } @@ -325,65 +384,172 @@ func (_m *Client) ListDevices() ([]models.Device, error) { return r0, r1 } -// Lookup provides a mock function with given fields: lookup -func (_m *Client) Lookup(lookup map[string]string) (string, []error) { - ret := _m.Called(lookup) +// Lookup provides a mock function with given fields: ctx, lookup +func (_m *Client) Lookup(ctx context.Context, lookup map[string]string) (string, error) { + ret := _m.Called(ctx, lookup) + + if len(ret) == 0 { + panic("no return value specified for Lookup") + } var r0 string - var r1 []error - if rf, ok := ret.Get(0).(func(map[string]string) (string, []error)); ok { - return rf(lookup) + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, map[string]string) (string, error)); ok { + return rf(ctx, lookup) } - if rf, ok := ret.Get(0).(func(map[string]string) string); ok { - r0 = rf(lookup) + if rf, ok := ret.Get(0).(func(context.Context, map[string]string) string); ok { + r0 = rf(ctx, lookup) } else { r0 = ret.Get(0).(string) } - if rf, ok := ret.Get(1).(func(map[string]string) []error); ok { - r1 = rf(lookup) + if rf, ok := ret.Get(1).(func(context.Context, map[string]string) error); ok { + r1 = rf(ctx, lookup) } else { - if ret.Get(1) != nil { - r1 = ret.Get(1).([]error) - } + r1 = ret.Error(1) } return r0, r1 } -// LookupDevice provides a mock function with given fields: -func (_m *Client) LookupDevice() { - _m.Called() -} +// LookupWebEndpoints provides a mock function with given fields: ctx, address +func (_m *Client) LookupWebEndpoints(ctx context.Context, address string) (*internalclient.WebEndpoint, error) { + ret := _m.Called(ctx, address) -// RecordSession provides a mock function with given fields: session, recordURL -func (_m *Client) 
RecordSession(session *models.SessionRecorded, recordURL string) { - _m.Called(session, recordURL) + if len(ret) == 0 { + panic("no return value specified for LookupWebEndpoints") + } + + var r0 *internalclient.WebEndpoint + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string) (*internalclient.WebEndpoint, error)); ok { + return rf(ctx, address) + } + if rf, ok := ret.Get(0).(func(context.Context, string) *internalclient.WebEndpoint); ok { + r0 = rf(ctx, address) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*internalclient.WebEndpoint) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { + r1 = rf(ctx, address) + } else { + r1 = ret.Error(1) + } + + return r0, r1 } -// SessionAsAuthenticated provides a mock function with given fields: uid -func (_m *Client) SessionAsAuthenticated(uid string) []error { - ret := _m.Called(uid) +// NamespaceLookup provides a mock function with given fields: ctx, tenant +func (_m *Client) NamespaceLookup(ctx context.Context, tenant string) (*models.Namespace, error) { + ret := _m.Called(ctx, tenant) + + if len(ret) == 0 { + panic("no return value specified for NamespaceLookup") + } - var r0 []error - if rf, ok := ret.Get(0).(func(string) []error); ok { - r0 = rf(uid) + var r0 *models.Namespace + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string) (*models.Namespace, error)); ok { + return rf(ctx, tenant) + } + if rf, ok := ret.Get(0).(func(context.Context, string) *models.Namespace); ok { + r0 = rf(ctx, tenant) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).([]error) + r0 = ret.Get(0).(*models.Namespace) } } + if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { + r1 = rf(ctx, tenant) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// SaveSession provides a mock function with given fields: ctx, uid, seat +func (_m *Client) SaveSession(ctx context.Context, uid string, seat int) error { + ret := _m.Called(ctx, uid, seat) + + if len(ret) 
== 0 { + panic("no return value specified for SaveSession") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, string, int) error); ok { + r0 = rf(ctx, uid, seat) + } else { + r0 = ret.Error(0) + } + return r0 } -type mockConstructorTestingTNewClient interface { - mock.TestingT - Cleanup(func()) +// SessionAsAuthenticated provides a mock function with given fields: ctx, uid +func (_m *Client) SessionAsAuthenticated(ctx context.Context, uid string) error { + ret := _m.Called(ctx, uid) + + if len(ret) == 0 { + panic("no return value specified for SessionAsAuthenticated") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { + r0 = rf(ctx, uid) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// SessionCreate provides a mock function with given fields: ctx, session +func (_m *Client) SessionCreate(ctx context.Context, session requests.SessionCreate) error { + ret := _m.Called(ctx, session) + + if len(ret) == 0 { + panic("no return value specified for SessionCreate") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, requests.SessionCreate) error); ok { + r0 = rf(ctx, session) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// UpdateSession provides a mock function with given fields: ctx, uid, model +func (_m *Client) UpdateSession(ctx context.Context, uid string, model *models.SessionUpdate) error { + ret := _m.Called(ctx, uid, model) + + if len(ret) == 0 { + panic("no return value specified for UpdateSession") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, string, *models.SessionUpdate) error); ok { + r0 = rf(ctx, uid, model) + } else { + r0 = ret.Error(0) + } + + return r0 } // NewClient creates a new instance of Client. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -func NewClient(t mockConstructorTestingTNewClient) *Client { +// The first argument is typically a *testing.T value. 
+func NewClient(t interface { + mock.TestingT + Cleanup(func()) +}) *Client { mock := &Client{} mock.Mock.Test(t) diff --git a/pkg/api/internalclient/namespace.go b/pkg/api/internalclient/namespace.go new file mode 100644 index 00000000000..810b8acd08b --- /dev/null +++ b/pkg/api/internalclient/namespace.go @@ -0,0 +1,41 @@ +package internalclient + +import ( + "context" + + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/shellhub-io/shellhub/pkg/worker" +) + +// namespaceAPI defines methods for interacting with namespace-related functionality. +type namespaceAPI interface { + // NamespaceLookup retrieves namespace with the specified tenant. + // It returns the namespace and any encountered errors. + NamespaceLookup(ctx context.Context, tenant string) (*models.Namespace, error) + // InviteMember sends an invitation to join the namespace with the specified tenant ID to the + // user with the specified id. The job will use the forwarded host to build the invitation link. + // It returns an error if any and panics if the Client has no worker available. + InviteMember(ctx context.Context, tenantID, userID, forwardedHost string) error +} + +func (c *client) NamespaceLookup(ctx context.Context, tenant string) (*models.Namespace, error) { + namespace := new(models.Namespace) + + resp, err := c.http. + R(). + SetContext(ctx). + SetPathParam("tenant", tenant). + SetResult(namespace). 
+ Get(c.config.APIBaseURL + "/api/namespaces/{tenant}") + if err := HasError(resp, err); err != nil { + return nil, err + } + + return namespace, nil +} + +func (c *client) InviteMember(ctx context.Context, tenantID, userID, forwardedHost string) error { + c.mustWorker() + + return c.worker.Submit(ctx, worker.TaskPattern("cloud-api:invites"), []byte(tenantID+":"+userID+":"+forwardedHost)) +} diff --git a/pkg/api/internalclient/option.go b/pkg/api/internalclient/option.go new file mode 100644 index 00000000000..99ef57ab2c5 --- /dev/null +++ b/pkg/api/internalclient/option.go @@ -0,0 +1,18 @@ +package internalclient + +import "github.com/shellhub-io/shellhub/pkg/worker/asynq" + +type clientOption func(c *client) error + +func WithAsynqWorker(redisURI string) clientOption { //nolint:revive + return func(c *client) error { + asynqClient, err := asynq.NewClient(redisURI) + if err != nil { + return err + } + + c.worker = asynqClient + + return nil + } +} diff --git a/pkg/api/internalclient/session.go b/pkg/api/internalclient/session.go new file mode 100644 index 00000000000..cd396263c20 --- /dev/null +++ b/pkg/api/internalclient/session.go @@ -0,0 +1,147 @@ +package internalclient + +import ( + "context" + "fmt" + "net/http" + "strconv" + "strings" + + "github.com/gorilla/websocket" + "github.com/shellhub-io/shellhub/pkg/api/requests" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/sirupsen/logrus" +) + +// sessionAPI defines methods for interacting with session-related functionality. +type sessionAPI interface { + // SessionCreate creates a new session based on the provided session creation request. + // It returns an error if the session creation fails. + SessionCreate(ctx context.Context, session requests.SessionCreate) error + + // SessionAsAuthenticated marks a session with the specified uid as authenticated. + // It returns a slice of errors encountered during the operation. 
+ SessionAsAuthenticated(ctx context.Context, uid string) error + + // FinishSession finishes the session with the specified uid. + // It returns a slice of errors encountered during the operation. + FinishSession(ctx context.Context, uid string) error + + // KeepAliveSession sends a keep-alive signal for the session with the specified uid. + // It returns a slice of errors encountered during the operation. + KeepAliveSession(ctx context.Context, uid string) error + + // UpdateSession updates some fields of [models.Session] using [models.SessionUpdate]. + UpdateSession(ctx context.Context, uid string, model *models.SessionUpdate) error + + // EventSessionStream creates a WebSocket client connection to endpoint to save session's events. + EventSessionStream(ctx context.Context, uid string) (*websocket.Conn, error) + + // SaveSession saves a session as a Asciinema file into the Object Storage and delete + // [models.SessionEventTypePtyOutput] events. + SaveSession(ctx context.Context, uid string, seat int) error +} + +func (c *client) SessionCreate(ctx context.Context, session requests.SessionCreate) error { + resp, err := c.http. + R(). + SetContext(ctx). + SetBody(session). + Post(c.config.APIBaseURL + "/internal/sessions") + + return HasError(resp, err) +} + +func (c *client) SessionAsAuthenticated(ctx context.Context, uid string) error { + resp, err := c.http. + R(). + SetContext(ctx). + SetPathParam("uid", uid). + SetBody(&models.Status{ + Authenticated: true, + }). + Patch(c.config.APIBaseURL + "/internal/sessions/{uid}") + + return HasError(resp, err) +} + +func (c *client) FinishSession(ctx context.Context, uid string) error { + resp, err := c.http. + R(). + SetContext(ctx). + SetPathParam("uid", uid). + Post(c.config.APIBaseURL + "/internal/sessions/{uid}/finish") + + return HasError(resp, err) +} + +func (c *client) KeepAliveSession(ctx context.Context, uid string) error { + resp, err := c.http. + R(). + SetContext(ctx). + SetPathParam("uid", uid). 
+ Post(c.config.APIBaseURL + "/internal/sessions/{uid}/keepalive") + + return HasError(resp, err) +} + +func (c *client) UpdateSession(ctx context.Context, uid string, model *models.SessionUpdate) error { + res, err := c.http. + R(). + SetContext(ctx). + SetPathParams(map[string]string{ + "tenant": uid, + }). + SetBody(model). + Patch(c.config.APIBaseURL + "/internal/sessions/{tenant}") + + return HasError(res, err) +} + +func (c *client) EventSessionStream(ctx context.Context, uid string) (*websocket.Conn, error) { + // Dial the enterprise events websocket. Convert configured enterprise HTTP scheme to ws(s). + scheme := "ws" + if strings.HasPrefix(c.config.APIBaseURL, "https") { + scheme = "wss" + } + + host := strings.TrimPrefix(strings.TrimPrefix(c.config.APIBaseURL, "http://"), "https://") + + connection, _, err := websocket.DefaultDialer.DialContext( + ctx, + fmt.Sprintf("%s://%s/internal/sessions/%s/events", scheme, host, uid), + nil, + ) + if err != nil { + return nil, HasError(nil, err) + } + + return connection, nil +} + +func (c *client) SaveSession(ctx context.Context, uid string, seat int) error { + resp, err := c.http. + R(). + SetContext(ctx). + SetPathParams(map[string]string{ + "uid": uid, + "seat": strconv.Itoa(seat), + }). + Post(c.config.EnterpriseBaseURL + "/internal/sessions/{uid}/records/{seat}") + if err := HasError(resp, err); err != nil { + return err + } + + if resp.StatusCode() == http.StatusNotAcceptable { + // NOTE: [http.StatusNotAcceptable] indicates that session's seat shouldn't be save, but also shouldn't + // represent an error. 
+ logrus.WithFields(logrus.Fields{ + "uid": uid, + "seat": strconv.Itoa(seat), + }).Debug("save session not acceptable") + + return nil + } + + return HasError(resp, err) +} diff --git a/pkg/api/internalclient/sshkey.go b/pkg/api/internalclient/sshkey.go new file mode 100644 index 00000000000..c66969f9179 --- /dev/null +++ b/pkg/api/internalclient/sshkey.go @@ -0,0 +1,73 @@ +package internalclient + +import ( + "context" + + "github.com/shellhub-io/shellhub/pkg/models" +) + +// sshkeyAPI defines methods for interacting with SSH key-related functionality. +type sshkeyAPI interface { + // GetPublicKey retrieves the public key identified by the provided fingerprint and tenant. + GetPublicKey(ctx context.Context, fingerprint, tenant string) (*models.PublicKey, error) + + // CreatePrivateKey creates a new private key. + CreatePrivateKey(ctx context.Context) (*models.PrivateKey, error) + + // EvaluateKey evaluates whether a given public key identified by fingerprint is valid for a device and username combination. + EvaluateKey(ctx context.Context, fingerprint string, dev *models.Device, username string) (bool, error) +} + +func (c *client) GetPublicKey(ctx context.Context, fingerprint, tenant string) (*models.PublicKey, error) { + pubKey := new(models.PublicKey) + + resp, err := c.http. + R(). + SetContext(ctx). + SetPathParams(map[string]string{ + "fingerprint": fingerprint, + "tenant": tenant, + }). + SetResult(&pubKey). + Get(c.config.APIBaseURL + "/internal/sshkeys/public-keys/{fingerprint}/{tenant}") + if err := HasError(resp, err); err != nil { + return nil, err + } + + return pubKey, nil +} + +func (c *client) EvaluateKey(ctx context.Context, fingerprint string, dev *models.Device, username string) (bool, error) { + var evaluate *bool + + resp, err := c.http. + R(). + SetContext(ctx). + SetPathParams(map[string]string{ + "fingerprint": fingerprint, + "username": username, + }). + SetBody(dev). + SetResult(&evaluate). 
+ Post(c.config.APIBaseURL + "/internal/sshkeys/public-keys/evaluate/{fingerprint}/{username}") + if err := HasError(resp, err); err != nil { + return false, err + } + + return *evaluate, nil +} + +func (c *client) CreatePrivateKey(ctx context.Context) (*models.PrivateKey, error) { + privKey := new(models.PrivateKey) + + resp, err := c.http. + R(). + SetContext(ctx). + SetResult(&privKey). + Post(c.config.APIBaseURL + "/internal/sshkeys/private-keys") + if err := HasError(resp, err); err != nil { + return nil, err + } + + return privKey, nil +} diff --git a/pkg/api/internalclient/util.go b/pkg/api/internalclient/util.go new file mode 100644 index 00000000000..8f04cef473d --- /dev/null +++ b/pkg/api/internalclient/util.go @@ -0,0 +1,54 @@ +package internalclient + +// note: code adapted from github.com/labstack/echo/v4@v4.13.4/middleware/util.go. + +import ( + "bufio" + "crypto/rand" + "io" + "sync" +) + +// https://tip.golang.org/doc/go1.19#:~:text=Read%20no%20longer%20buffers%20random%20data%20obtained%20from%20the%20operating%20system%20between%20calls +var randomReaderPool = sync.Pool{New: func() interface{} { + return bufio.NewReader(rand.Reader) +}} + +const ( + randomStringCharset = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + randomStringCharsetLen = 52 // len(randomStringCharset) + randomStringMaxByte = 255 - (256 % randomStringCharsetLen) +) + +func randomString(length uint8) string { + reader := randomReaderPool.Get().(*bufio.Reader) + defer randomReaderPool.Put(reader) + + b := make([]byte, length) + r := make([]byte, length+(length/4)) // perf: avoid read from rand.Reader many times + var i uint8 = 0 + + // security note: + // we can't just simply do b[i]=randomStringCharset[rb%len(randomStringCharset)], + // len(randomStringCharset) is 52, and rb is [0, 255], 256 = 52 * 4 + 48. + // make the first 48 characters more likely to be generated than others. 
+ // So we have to skip bytes when rb > randomStringMaxByte + + for { + _, err := io.ReadFull(reader, r) + if err != nil { + panic("unexpected error happened when reading from bufio.NewReader(crypto/rand.Reader)") + } + for _, rb := range r { + if rb > randomStringMaxByte { + // Skip this number to avoid bias. + continue + } + b[i] = randomStringCharset[rb%randomStringCharsetLen] + i++ + if i == length { + return string(b) + } + } + } +} diff --git a/pkg/api/jwttoken/claims.go b/pkg/api/jwttoken/claims.go new file mode 100644 index 00000000000..45d8bdae974 --- /dev/null +++ b/pkg/api/jwttoken/claims.go @@ -0,0 +1,145 @@ +package jwttoken + +import ( + "crypto/rsa" + "errors" + "strings" + "time" + + "github.com/golang-jwt/jwt/v4" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" + "github.com/shellhub-io/shellhub/pkg/clock" + "github.com/shellhub-io/shellhub/pkg/uuid" +) + +// TODO: +// 1. Rename [user|device]Claims.Kind JSON's tag to "kind". (BREAKING CHANGE) +// 2. Rename this package to jwt. + +type ( + // claimKind represents the type of claims used in JWT tokens. + claimKind string + + // userClaims is an auxiliary type that embeds [github.com/golang-jwt/jwt/v4.RegisteredClaims] + // into [github.com/shellhub-io/shellhub/pkg/api/authorizer.UserClaims] to convert it into + // [github.com/golang-jwt/jwt/v4.Claims] for use in an [encode] call. + userClaims struct { + Kind claimKind `json:"claims"` + authorizer.UserClaims + jwt.RegisteredClaims + } + + // deviceClaims is an auxiliary type that embeds [github.com/golang-jwt/jwt/v4.RegisteredClaims] + // into [github.com/shellhub-io/shellhub/pkg/api/authorizer.DeviceClaims] to convert it into + // [github.com/golang-jwt/jwt/v4.Claims] for use in an [encode] call. 
+ deviceClaims struct { + Kind claimKind `json:"claims"` + authorizer.DeviceClaims + jwt.RegisteredClaims + } +) + +const ( + kindUserClaims claimKind = "user" + kindDeviceClaims claimKind = "device" + kindUnknownClaims claimKind = "unknown" +) + +// claimKindFromString converts a string to a claimKind. +func claimKindFromString(str string) claimKind { + switch str { + case "user": + return kindUserClaims + case "device": + return kindDeviceClaims + default: + return kindUnknownClaims + } +} + +// EncodeUserClaims encodes the provided user claims into a signed JWT token. It returns +// the encoded token and an error, if any. +// +// The token is valid for 72 hours; tenantID is optional. +func EncodeUserClaims(claims authorizer.UserClaims, privateKey *rsa.PrivateKey) (string, error) { + now := time.Now() + jwtClaims := userClaims{ + Kind: kindUserClaims, + UserClaims: claims, + RegisteredClaims: jwt.RegisteredClaims{ + ID: uuid.Generate(), + Issuer: "", // TODO: how can we get the correct issuer? + IssuedAt: jwt.NewNumericDate(now), + NotBefore: jwt.NewNumericDate(now), + ExpiresAt: jwt.NewNumericDate(now.Add(time.Hour * 72)), + }, + } + + token, err := encodeClaims(jwtClaims, privateKey) + if err != nil { + return "", err + } + + return token, nil +} + +// EncodeDeviceClaims encodes the provided device claims into a signed JWT token. It returns +// the encoded token and an error, if any. +func EncodeDeviceClaims(claims authorizer.DeviceClaims, privateKey *rsa.PrivateKey) (string, error) { + now := clock.Now() + jwtClaims := deviceClaims{ + Kind: kindDeviceClaims, + DeviceClaims: claims, + RegisteredClaims: jwt.RegisteredClaims{ + ID: uuid.Generate(), + Issuer: "", // TODO: how can we get the correct issuer? 
+ IssuedAt: jwt.NewNumericDate(now), + NotBefore: jwt.NewNumericDate(now), + }, + } + + token, err := encodeClaims(jwtClaims, privateKey) + if err != nil { + return "", err + } + + return token, nil +} + +// ClaimsFromBearerToken decodes the provided bearer token into either [github.com/shellhub-io/shellhub/pkg/api/authorizer.UserClaims] +// or [github.com/shellhub-io/shellhub/pkg/api/authorizer.DeviceClaims]. It returns the decoded claims and an error if any. +func ClaimsFromBearerToken(publicKey *rsa.PublicKey, bearerToken string) (interface{}, error) { + raw := strings.ReplaceAll(bearerToken, "Bearer ", "") + + return unmarshalBearerToken(publicKey, raw) +} + +func unmarshalBearerToken(publicKey *rsa.PublicKey, raw string) (interface{}, error) { + kindAux := struct { + Kind string `json:"claims"` + jwt.RegisteredClaims + }{} + + if _, err := jwt.ParseWithClaims(raw, &kindAux, evalClaims(publicKey)); err != nil { + return nil, err + } + + switch claimKindFromString(kindAux.Kind) { + case kindUserClaims: + claims := new(userClaims) + if err := decodeClaims(publicKey, raw, claims); err != nil { + return nil, err + } + + return &claims.UserClaims, nil + case kindDeviceClaims: + claims := new(deviceClaims) + if err := decodeClaims(publicKey, raw, claims); err != nil { + return nil, err + } + + return &claims.DeviceClaims, nil + default: + return nil, errors.New("invalid JWT's kind") + } +} diff --git a/pkg/api/jwttoken/encoder.go b/pkg/api/jwttoken/encoder.go new file mode 100644 index 00000000000..bbdb08d3b0d --- /dev/null +++ b/pkg/api/jwttoken/encoder.go @@ -0,0 +1,33 @@ +package jwttoken + +import ( + "crypto/rsa" + "fmt" + + "github.com/golang-jwt/jwt/v4" +) + +// encodeClaims encodes the provided claims into a JWT token using the provided RSA private key. +// It returns the encoded JWT token as a string and any error encountered during the encoding process. +// The claims are signed using the RS256 signing method. 
+func encodeClaims(claims jwt.Claims, privateKey *rsa.PrivateKey) (string, error) { + return jwt.NewWithClaims(jwt.SigningMethodRS256, claims).SignedString(privateKey) +} + +// decodeClaims decodes the raw JWT into claims. +func decodeClaims[T jwt.Claims](publicKey *rsa.PublicKey, raw string, claims T) error { + _, err := jwt.ParseWithClaims(raw, claims, evalClaims(publicKey), jwt.WithValidMethods([]string{jwt.SigningMethodRS256.Alg()})) + + return err +} + +// evalClaims evaluates if a token is valid. +func evalClaims(publicKey *rsa.PublicKey) jwt.Keyfunc { + return func(t *jwt.Token) (interface{}, error) { + if _, ok := t.Method.(*jwt.SigningMethodRSA); !ok { + return nil, fmt.Errorf("unexpected signature method: %v", t.Header["alg"]) + } + + return publicKey, nil + } +} diff --git a/pkg/api/jwttoken/jwttoken.go b/pkg/api/jwttoken/jwttoken.go deleted file mode 100644 index 47905a274c0..00000000000 --- a/pkg/api/jwttoken/jwttoken.go +++ /dev/null @@ -1,105 +0,0 @@ -package jwttoken - -import ( - "crypto/rsa" - "errors" - "time" - - "github.com/golang-jwt/jwt/v4" - "github.com/shellhub-io/shellhub/pkg/clock" - "github.com/shellhub-io/shellhub/pkg/uuid" -) - -var ErrMissingPrivateKey = errors.New("missing private key while trying to sign the token") - -type Claims interface { - jwt.Claims - SetRegisteredClaims(claims jwt.RegisteredClaims) -} - -type Token struct { - registeredClaims jwt.RegisteredClaims - privateKey *rsa.PrivateKey - claims Claims - method jwt.SigningMethod - raw *jwt.Token - str string -} - -// New creates a new Token that can be signed as a JWT token, pre-filled with default values for "jti," "iat," and "iss". -// and can be modified using the "WithExpire" method. It also provides a default signing method, RS256, which can be -// customized with the "WithMethod" method. To include non-default claims, use the "WithClaims" method. You must -// provide a valid private key using the "WithPrivateKey" method. 
To complete the token creation, use the "Sign" method. -func New() *Token { - return &Token{ - str: "", - raw: nil, - claims: nil, - method: jwt.SigningMethodRS256, - privateKey: nil, - registeredClaims: jwt.RegisteredClaims{ - ID: uuid.Generate(), - Issuer: "https://cloud.shellhub.io", - IssuedAt: jwt.NewNumericDate(clock.Now().UTC()), - }, - } -} - -// WithClaims sets the costum claims of the token. It will subscribe any predefined RegisteredClaims. -func (t *Token) WithClaims(claims Claims) *Token { - claims.SetRegisteredClaims(t.registeredClaims) - t.claims = claims - - return t -} - -// WithExpire sets the expiration time for the JWT. -func (t *Token) WithExpire(exp time.Time) *Token { - t.registeredClaims.ExpiresAt = jwt.NewNumericDate(exp) - - return t -} - -// WithMethod sets the signing method for the JWT. Default is RS256. -func (t *Token) WithMethod(method jwt.SigningMethod) *Token { - t.method = method - - return t -} - -// WithPrivateKey sets the private key for signing the JWT. -func (t *Token) WithPrivateKey(pk *rsa.PrivateKey) *Token { - t.privateKey = pk - - return t -} - -// Sign finalizes the configuration of the JWT and signs it with the private key. -// If no custom claims have been set, it will sign an empty token with only the "jti" claim. -func (t *Token) Sign() (*Token, error) { - if t.privateKey == nil { - return nil, ErrMissingPrivateKey - } - - var token *jwt.Token - if t.claims != nil { - token = jwt.NewWithClaims(t.method, t.claims) - } else { - token = jwt.NewWithClaims(t.method, t.registeredClaims) - } - - tokenStr, err := token.SignedString(t.privateKey) - if err != nil { - return nil, err - } - - t.raw = token - t.str = tokenStr - - return t, nil -} - -// String returns the string representation of the signed JWT. 
-func (t *Token) String() string { - return t.str -} diff --git a/pkg/api/order/query.go b/pkg/api/order/query.go deleted file mode 100644 index 69f70b872ce..00000000000 --- a/pkg/api/order/query.go +++ /dev/null @@ -1,16 +0,0 @@ -package order - -const ( - Asc = "asc" - Desc = "desc" -) - -type Query struct { - OrderBy string `query:"order_by" validate:"omitempty,oneof=asc desc"` -} - -func (q *Query) Normalize() { - if q.OrderBy == "" || (q.OrderBy != Asc && q.OrderBy != Desc) { - q.OrderBy = Desc - } -} diff --git a/pkg/api/paginator/doc.go b/pkg/api/paginator/doc.go deleted file mode 100644 index 320a6d01129..00000000000 --- a/pkg/api/paginator/doc.go +++ /dev/null @@ -1,2 +0,0 @@ -// Package paginator provides a simple pagination utility for managing paginated queries. -package paginator diff --git a/pkg/api/paginator/query.go b/pkg/api/paginator/query.go deleted file mode 100644 index 21445924cc0..00000000000 --- a/pkg/api/paginator/query.go +++ /dev/null @@ -1,36 +0,0 @@ -package paginator - -import ( - "math" -) - -const ( - MinPage = 1 // MinPage represents the minimum allowed value for the pagination query's Page parameter. - MinPerPage = 1 // MinPerPage represents the minimum allowed value for the pagination query's PerPage parameter. - MaxPerPage = 100 // MaxPerPage represents the maximum allowed value for the pagination query's PerPage parameter. -) - -// Query represents the pagination query parameters. -type Query struct { - Page int `query:"page"` // Page represents the current page number. - PerPage int `query:"per_page"` // PerPage represents the number of items per page. -} - -// NewQuery creates a new pagination query with default values. If a custom pagination is -// provided in the request, it is advisable to normalize the values using the `Query.Normalize()` -// method. -func NewQuery() *Query { - return &Query{ - Page: MinPage, - PerPage: MinPerPage, - } -} - -// Normalize ensures valid values for Page and PerPage in the pagination query. 
-// If query.PerPage is less than one, it is set to `MinPerPage`. -// If query.Page is less than one, it is set to `MinPage`. -// The maximum allowed value for query.PerPage is `MaxPerPage`. -func (q *Query) Normalize() { - q.PerPage = int(math.Max(math.Min(float64(q.PerPage), float64(MaxPerPage)), float64(MinPerPage))) - q.Page = int(math.Max(float64(MinPage), float64(q.Page))) -} diff --git a/pkg/api/paginator/query_test.go b/pkg/api/paginator/query_test.go deleted file mode 100644 index aa35e3a2dd9..00000000000 --- a/pkg/api/paginator/query_test.go +++ /dev/null @@ -1,42 +0,0 @@ -package paginator - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestNormalize(t *testing.T) { - cases := []struct { - description string - query *Query - expected *Query - }{ - { - description: "set Page to MinParge when Page is lower than 1", - query: &Query{Page: -2, PerPage: 100}, - expected: &Query{Page: 1, PerPage: 100}, - }, - { - description: "set PerPage to MinPerParge when PerPage is lower than 1", - query: &Query{Page: 1, PerPage: -2}, - expected: &Query{Page: 1, PerPage: 1}, - }, - { - description: "set PerPage to MaxPerParge when PerPage is greather than 100", - query: &Query{Page: 1, PerPage: 101}, - expected: &Query{Page: 1, PerPage: 100}, - }, - { - description: "successfully parse query", - query: &Query{Page: 8, PerPage: 78}, - expected: &Query{Page: 8, PerPage: 78}, - }, - } - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - tc.query.Normalize() - assert.Equal(t, tc.expected, tc.query) - }) - } -} diff --git a/pkg/api/query/filter.go b/pkg/api/query/filter.go new file mode 100644 index 00000000000..f3fc41056a1 --- /dev/null +++ b/pkg/api/query/filter.go @@ -0,0 +1,82 @@ +package query + +import ( + "encoding/base64" + "encoding/json" + "errors" +) + +var ( + ErrFilterInvalid = errors.New("filter is invalid") + ErrFilterPropertyInvalid = errors.New("filter property is not valid") + ErrFilterOperatorInvalid = 
errors.New("filter operator is not valid") +) + +// Filters represents a set of filters that can be applied to queries. +type Filters struct { + // Raw holds the raw data of the filter and it's a base64-encoded JSON. + Raw string `query:"filter"` + + // Data stores the decoded filters; it's automatically populated with the Unmarshal method. + Data []Filter +} + +// NewFilters creates a new instance of Filters with an empty Data slice. +func NewFilters() *Filters { + return &Filters{Data: nil} +} + +// Unmarshal decodes and unmarshals the raw filters, populating the Data attribute. +func (fs *Filters) Unmarshal() error { + raw, err := base64.StdEncoding.DecodeString(fs.Raw) + if err != nil { + return err + } + + if err := json.Unmarshal(raw, &fs.Data); len(raw) > 0 && err != nil { + return err + } + + return nil +} + +type Filter struct { + Type string `json:"type,omitempty"` + Params interface{} `json:"params,omitempty"` +} + +func (f *Filter) UnmarshalJSON(data []byte) error { + var params json.RawMessage + + type filter Filter + aux := filter{ + Params: ¶ms, + } + + if err := json.Unmarshal(data, &aux); err != nil { + return err + } + + f.Type = aux.Type + + switch f.Type { + case FilterTypeProperty: + var property FilterProperty + if err := json.Unmarshal(params, &property); err != nil { + return err + } + f.Params = &property + + return nil + case FilterTypeOperator: + var operator FilterOperator + if err := json.Unmarshal(params, &operator); err != nil { + return err + } + f.Params = &operator + + return nil + default: + return ErrFilterInvalid + } +} diff --git a/pkg/api/query/filter_test.go b/pkg/api/query/filter_test.go new file mode 100644 index 00000000000..cf4915d4da8 --- /dev/null +++ b/pkg/api/query/filter_test.go @@ -0,0 +1,64 @@ +package query + +import ( + "encoding/base64" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestFilterUnmarshalJSON(t *testing.T) { + cases := []struct { + description string + filter *Filter + data string 
+ expected error + }{ + { + description: "", + filter: &Filter{ + Type: "property", + Params: FilterProperty{ + Name: "online", + Operator: "bool", + Value: "true", + }, + }, + // { + // "type": "property", + // "params": { + // "name": "online", + // "operator": "bool", + // "value": "true" + // } + // } + data: "ewogICAgInR5cGUiOiAicHJvcGVydHkiLAogICAgInBhcmFtcyI6IHsKICAgICAgICAibmFtZSI6ICJvbmxpbmUiLAogICAgICAgICJvcGVyYXRvciI6ICJib29sIiwKICAgICAgICAidmFsdWUiOiAidHJ1ZSIKICAgIH0KfQ==", + expected: nil, + }, + { + description: "", + filter: &Filter{ + Type: "operator", + Params: FilterOperator{ + Name: "and", + }, + }, + // { + // "type": "operator", + // "params": { + // "name": "and" + // } + // } + data: "ewogICAgInR5cGUiOiAib3BlcmF0b3IiLAogICAgInBhcmFtcyI6IHsKICAgICAgICAibmFtZSI6ICJhbmQiCiAgICB9Cn0=", + expected: nil, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + raw, err := base64.StdEncoding.DecodeString(tc.data) + assert.NoError(t, err) + assert.Equal(t, tc.expected, tc.filter.UnmarshalJSON(raw)) + }) + } +} diff --git a/pkg/api/query/filter_util.go b/pkg/api/query/filter_util.go new file mode 100644 index 00000000000..22735a77614 --- /dev/null +++ b/pkg/api/query/filter_util.go @@ -0,0 +1,35 @@ +package query + +const ( + FilterTypeProperty = "property" + FilterTypeOperator = "operator" +) + +// FilterProperty is a JSON representation of a property expression in a query. +// +// Name is the attribute to be observed in the operation, Operator is the operation, and +// Value is the value used in the operation. While Name can be any string, the Operator must be supported +// by the implementation, and the Value is the value used in the operation. +// +// Each operator has its own implementation, and one operator can have multiple implementations. For that reason, +// the operator must be converted to a useful value using build methods. 
+// +// Examples: +// A FilterProperty with Operator "gt", Name "count", and Value 12 will filter documents with the attribute "count" greater than 12. +// Another FilterProperty with Operator "eq", Name "alias", and Value "foobar" will filter documents with the attribute "alias" equal to "foobar". +type FilterProperty struct { + // Name is the attribute to be observed in the operation. + Name string `json:"name"` + + // Operator is the operation (e.g., "eq" for equal). + Operator string `json:"operator"` + + // Value is the value used in the operation. (e.g., "eq" operations use Value to determine the value to be equal). + Value interface{} `json:"value"` +} + +// FilterOperator represents a JSON representation of a filter operator in a query (e.g., "and", "or" in MongoDB queries). +type FilterOperator struct { + // Name is the filter operator (e.g., "and", "or"). + Name string `json:"name"` +} diff --git a/pkg/api/query/paginator.go b/pkg/api/query/paginator.go new file mode 100644 index 00000000000..b5caf202fed --- /dev/null +++ b/pkg/api/query/paginator.go @@ -0,0 +1,41 @@ +package query + +import "math" + +const ( + MinPage = 1 // MinPage represents the minimum allowed value for the pagination query's Page parameter. + MinPerPage = 1 // MinPerPage represents the minimum allowed value for the pagination query's PerPage parameter. + DefaultPerPage = 10 // DefaultPerPage represents the default value for the pagination query's PerPage parameter. + MaxPerPage = 100 // MaxPerPage represents the maximum allowed value for the pagination query's PerPage parameter. +) + +// Paginator represents the paginator parameters in a query. +type Paginator struct { + // Page represents the current page number. + Page int `query:"page"` + + // PerPage represents the number of items per page. + PerPage int `query:"per_page"` +} + +// NewPaginator creates and returns a new Paginator instance with MinPage and DefaultPerPage. 
+func NewPaginator() *Paginator { + return &Paginator{ + Page: MinPage, + PerPage: DefaultPerPage, + } +} + +// Normalize ensures valid values for Page and PerPage in the pagination query. +// If query.PerPage is zero, it is set to `DefaultPerPage`; values below `MinPerPage` are raised to `MinPerPage`. +// If query.Page is less than one, it is set to `MinPage`. +// The maximum allowed value for query.PerPage is `MaxPerPage`. +func (p *Paginator) Normalize() { + p.Page = int(math.Max(float64(MinPage), float64(p.Page))) + + if p.PerPage == 0 { + p.PerPage = DefaultPerPage + } else { + p.PerPage = int(math.Max(math.Min(float64(p.PerPage), float64(MaxPerPage)), float64(MinPerPage))) + } +} diff --git a/pkg/api/query/paginator_test.go b/pkg/api/query/paginator_test.go new file mode 100644 index 00000000000..06ec411eae1 --- /dev/null +++ b/pkg/api/query/paginator_test.go @@ -0,0 +1,47 @@ +package query + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestPaginatorNormalize(t *testing.T) { + cases := []struct { + description string + paginator *Paginator + expected *Paginator + }{ + { + description: "set Page to MinPage when Page is lower than 1", + paginator: &Paginator{Page: -2, PerPage: 100}, + expected: &Paginator{Page: 1, PerPage: 100}, + }, + { + description: "set PerPage to MinPerPage when PerPage is lower than 1", + paginator: &Paginator{Page: 1, PerPage: -2}, + expected: &Paginator{Page: 1, PerPage: 1}, + }, + { + description: "set PerPage to MaxPerPage when PerPage is greater than 100", + paginator: &Paginator{Page: 1, PerPage: 101}, + expected: &Paginator{Page: 1, PerPage: 100}, + }, + { + description: "set PerPage to DefaultPerPage when PerPage is 0", + paginator: &Paginator{Page: 1, PerPage: 0}, + expected: &Paginator{Page: 1, PerPage: 10}, + }, + { + description: "successfully parse query", + paginator: &Paginator{Page: 8, PerPage: 78}, + expected: &Paginator{Page: 8, PerPage: 78}, + }, + } + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + 
tc.paginator.Normalize() + assert.Equal(t, tc.expected, tc.paginator) + }) + } +} diff --git a/pkg/api/query/sorter.go b/pkg/api/query/sorter.go new file mode 100644 index 00000000000..81459c1a10d --- /dev/null +++ b/pkg/api/query/sorter.go @@ -0,0 +1,28 @@ +package query + +const ( + OrderAsc = "asc" + OrderDesc = "desc" +) + +// Sorter represents the sorting order in a query. +type Sorter struct { + By string `query:"sort_by"` + Order string `query:"order_by" validate:"omitempty,oneof=asc desc"` +} + +// NewSorter creates and returns a new Sorter instance with the default descending order. +func NewSorter() *Sorter { + return &Sorter{ + By: "", + Order: OrderDesc, + } +} + +// Normalize ensures that the sorting order is valid. +// If an invalid order is provided, it defaults to descending order. +func (s *Sorter) Normalize() { + if s.Order != OrderAsc && s.Order != OrderDesc { + s.Order = OrderDesc + } +} diff --git a/pkg/api/query/sorter_test.go b/pkg/api/query/sorter_test.go new file mode 100644 index 00000000000..6b1d7fec186 --- /dev/null +++ b/pkg/api/query/sorter_test.go @@ -0,0 +1,32 @@ +package query + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestSortNormalize(t *testing.T) { + cases := []struct { + description string + order *Sorter + expected *Sorter + }{ + { + description: "sets Order to desc when Order is invalid", + order: &Sorter{By: "date", Order: "foo"}, + expected: &Sorter{By: "date", Order: "desc"}, + }, + { + description: "successfully parse query", + order: &Sorter{By: "date", Order: "asc"}, + expected: &Sorter{By: "date", Order: "asc"}, + }, + } + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + tc.order.Normalize() + assert.Equal(t, tc.expected, tc.order) + }) + } +} diff --git a/pkg/api/requests/api-key.go b/pkg/api/requests/api-key.go new file mode 100644 index 00000000000..673fc197480 --- /dev/null +++ b/pkg/api/requests/api-key.go @@ -0,0 +1,37 @@ +package requests + +import ( + 
"github.com/shellhub-io/shellhub/pkg/api/authorizer" + "github.com/shellhub-io/shellhub/pkg/api/query" +) + +type CreateAPIKey struct { + UserID string `header:"X-ID"` + TenantID string `header:"X-Tenant-ID"` + Role authorizer.Role `header:"X-Role"` + Name string `json:"name" validate:"required,api-key_name"` + ExpiresAt int `json:"expires_at" validate:"required,api-key_expires-at"` + Key string `json:"key" validate:"omitempty,uuid"` + OptRole authorizer.Role `json:"role" validate:"omitempty,member_role"` +} + +type ListAPIKey struct { + TenantID string `header:"X-Tenant-ID"` + query.Paginator + query.Sorter +} + +type UpdateAPIKey struct { + UserID string `header:"X-ID"` + TenantID string `header:"X-Tenant-ID"` + // CurrentName is the current stored name. It is different from [UpdateAPIKey.Name], which is used + // to handle the new target name (optional). + CurrentName string `param:"name" validate:"required"` + Name string `json:"name" validate:"omitempty,api-key_name"` + Role authorizer.Role `json:"role" validate:"omitempty,member_role"` +} + +type DeleteAPIKey struct { + TenantID string `header:"X-Tenant-ID"` + Name string `param:"name" validate:"required"` +} diff --git a/pkg/api/requests/auth.go b/pkg/api/requests/auth.go index 724bc7dca0d..0e9211bb892 100644 --- a/pkg/api/requests/auth.go +++ b/pkg/api/requests/auth.go @@ -3,10 +3,14 @@ package requests // AuthTokenGet is the structure to represent the request data for get auth token endpoint. type AuthTokenGet struct { UserParam - MFA bool `json:"mfa"` } // AuthTokenSwap is the structure to represent the request data for swap auth token endpoint. 
type AuthTokenSwap struct { TenantParam } + +type CreateUserToken struct { + UserID string `param:"id" header:"X-ID" validate:"required"` + TenantID string `param:"tenant" validate:"omitempty,uuid"` +} diff --git a/pkg/api/requests/device.go b/pkg/api/requests/device.go index 75311303214..4d328cba559 100644 --- a/pkg/api/requests/device.go +++ b/pkg/api/requests/device.go @@ -1,5 +1,24 @@ package requests +import ( + "github.com/shellhub-io/shellhub/pkg/api/query" + "github.com/shellhub-io/shellhub/pkg/models" +) + +type DeviceList struct { + TenantID string `header:"X-Tenant-ID"` + DeviceStatus models.DeviceStatus `query:"status"` // TODO: validate + query.Paginator + query.Sorter + query.Filters +} + +type DeviceUpdate struct { + TenantID string `header:"X-Tenant-ID"` + UID string `param:"uid" validate:"required"` + Name string `json:"name" validate:"device_name,omitempty"` +} + // DeviceParam is a structure to represent and validate a device UID as path param. type DeviceParam struct { UID string `param:"uid" validate:"required"` @@ -10,6 +29,12 @@ type DeviceGet struct { DeviceParam } +type ResolveDevice struct { + TenantID string `header:"X-Tenant-ID" validate:"required"` + UID string `query:"uid" validate:"omitempty"` + Hostname string `query:"hostname" validate:"omitempty"` +} + // DeviceDelete is the structure to represent the request data for delete device endpoint. type DeviceDelete struct { DeviceParam @@ -28,21 +53,15 @@ type DeviceOffline struct { // DeviceLookup is the structure to represent the request data for lookup device endpoint. 
type DeviceLookup struct { - Domain string `query:"domain" validate:"required"` - Name string `query:"name" validate:"required"` - Username string `query:"username" validate:""` - IPAddress string `query:"ip_address" validate:""` + TenantID string `query:"tenant_id" validate:"required"` + Name string `query:"name" validate:"required"` } // DeviceStatus is the structure to represent the request data for update device status to pending endpoint. type DeviceUpdateStatus struct { - DeviceParam - Status string `param:"status" validate:"required,oneof=accept reject pending unused"` -} - -// DeviceHeartbeat is the structure to represent the request data for device heartbeat endpoint. -type DeviceHeartbeat struct { - DeviceParam + TenantID string `header:"X-Tenant-ID"` + UID string `param:"uid" validate:"required"` + Status string `param:"status" validate:"required,oneof=accepted pending rejected"` } // DeviceCreateTag is the structure to represent the request data for device create tag endpoint. @@ -79,23 +98,9 @@ type DeviceInfo struct { type DeviceAuth struct { Info *DeviceInfo `json:"info" validate:"required"` Sessions []string `json:"sessions,omitempty"` - Hostname string `json:"hostname,omitempty" validate:"required_without=Identity,omitempty,hostname_rfc1123" hash:"-"` + Hostname string `json:"hostname,omitempty" validate:"required_without=Identity,omitempty,device_name" hash:"-"` Identity *DeviceIdentity `json:"identity,omitempty" validate:"required_without=Hostname,omitempty"` PublicKey string `json:"public_key" validate:"required"` TenantID string `json:"tenant_id" validate:"required"` -} - -type DeviceGetPublicURL struct { - DeviceParam -} - -type DeviceUpdate struct { - DeviceParam - // NOTICE: the pointers here help to distinguish between the zero value and the absence of the field. 
- Name *string `json:"name"` - PublicURL *bool `json:"public_url"` -} - -type DevicePublicURLAddress struct { - PublicURLAddress string `param:"address" validate:"required"` + RealIP string `header:"X-Real-IP"` } diff --git a/pkg/api/requests/namespace.go b/pkg/api/requests/namespace.go index 926e0f95fa0..75ca6dfc002 100644 --- a/pkg/api/requests/namespace.go +++ b/pkg/api/requests/namespace.go @@ -1,8 +1,13 @@ package requests +import ( + "github.com/shellhub-io/shellhub/pkg/api/authorizer" + "github.com/shellhub-io/shellhub/pkg/api/query" +) + // TenantParam is a structure to represent and validate a namespace tenant as path param. type TenantParam struct { - Tenant string `param:"tenant" validate:"required,min=3,max=255,ascii,excludes=/@&:"` + Tenant string `param:"tenant" validate:"required,uuid"` } // RoleBody is a structure to represent and validate a namespace role as request body. @@ -15,10 +20,18 @@ type MemberParam struct { MemberUID string `param:"uid" validate:"required"` } +// NamespaceCreate is the structure to represent the request data for create namespace endpoint. +type NamespaceList struct { + query.Paginator + query.Filters +} + // NamespaceCreate is the structure to represent the request data for create namespace endpoint. type NamespaceCreate struct { + UserID string `header:"X-ID" validate:"required"` Name string `json:"name" validate:"required,hostname_rfc1123,excludes=."` - TenantID string `json:"tenant" validate:"min=3,max=255,ascii,excludes=/@&:"` + TenantID string `json:"tenant" validate:"omitempty,uuid"` + Type string `json:"type" validate:"omitempty,lowercase,oneof=personal team"` } // NamespaceGet is the structure to represent the request data for get namespace endpoint. @@ -34,27 +47,40 @@ type NamespaceDelete struct { // NamespaceEdit is the structure to represent the request data for edit namespace endpoint. 
type NamespaceEdit struct { TenantParam - Name string `json:"name" validate:"required,hostname_rfc1123,excludes=."` + Name string `json:"name" validate:"omitempty,hostname_rfc1123,excludes=."` + Settings struct { + SessionRecord *bool `json:"session_record" validate:"omitempty"` + ConnectionAnnouncement *string `json:"connection_announcement" validate:"omitempty,min=0,max=4096"` + } `json:"settings"` } -// NamespaceAddUser is the structure to represent the request data for add member to namespace endpoint. -type NamespaceAddUser struct { - TenantParam - Username string `json:"username" validate:"required"` - RoleBody +type NamespaceAddMember struct { + FowardedHost string `header:"X-Forwarded-Host" validate:"required"` + UserID string `header:"X-ID" validate:"required"` + TenantID string `param:"tenant" validate:"required,uuid"` + MemberEmail string `json:"email" validate:"required"` + MemberRole authorizer.Role `json:"role" validate:"required,member_role"` } -// NamespaceRemoveUser is the structure to represent the request data for remove member from namespace endpoint. -type NamespaceRemoveUser struct { - TenantParam - MemberParam +type NamespaceUpdateMember struct { + UserID string `header:"X-ID" validate:"required"` + TenantID string `param:"tenant" validate:"required,uuid"` + MemberID string `param:"uid" validate:"required"` + MemberRole authorizer.Role `json:"role" validate:"omitempty,member_role"` } -// NamespaceEditUser is the structure to represent the request data for edit member from namespace endpoint. -type NamespaceEditUser struct { - TenantParam - MemberParam - RoleBody +type NamespaceRemoveMember struct { + UserID string `header:"X-ID" validate:"required"` + TenantID string `param:"tenant" validate:"required,uuid"` + MemberID string `param:"uid" validate:"required"` +} + +type LeaveNamespace struct { + UserID string `header:"X-ID" validate:"required"` + // TenantID represents the namespace that the user intends to leave. 
+ TenantID string `param:"tenant" validate:"required,uuid"` + // AuthenticatedTenantID represents the namespace to which the user is currently authenticated. + AuthenticatedTenantID string `header:"X-Tenant-ID" validate:"required"` } // SessionEditRecordStatus is the structure to represent the request data for edit session record status endpoint. diff --git a/pkg/api/requests/publickey.go b/pkg/api/requests/publickey.go index e6c83748c52..477d8dad4db 100644 --- a/pkg/api/requests/publickey.go +++ b/pkg/api/requests/publickey.go @@ -1,10 +1,17 @@ package requests +import "github.com/shellhub-io/shellhub/pkg/api/query" + // FingerprintParam is a structure to represent and validate a public key fingerprint as path param. type FingerprintParam struct { Fingerprint string `param:"fingerprint" validate:"required"` } +type ListPublicKeys struct { + TenantID string `header:"X-Tenant-ID"` + query.Paginator +} + // PublicKeyGet is the structure to represent the request data for get public key endpoint. type PublicKeyGet struct { FingerprintParam @@ -12,12 +19,8 @@ type PublicKeyGet struct { } type PublicKeyFilter struct { - Hostname string `json:"hostname,omitempty" validate:"required_without=Tags,excluded_with=Tags,regexp"` - // FIXME: add validation for tags when it has at least one item. - // - // If used `min=1` to do that validation, when tags is empty, its zero value, and only hostname is provided, - // it throws a error even with `required_without` and `excluded_with`. - Tags []string `json:"tags,omitempty" validate:"required_without=Hostname,excluded_with=Hostname,max=3,unique,dive,min=3,max=255,alphanum,ascii,excludes=/@&:"` + Hostname string `json:"hostname,omitempty" validate:"required_without=Tags,excluded_with=Tags,regexp"` + Tags []string `json:"tags,omitempty" validate:"required_without=Hostname"` } // PublicKeyCreate is the structure to represent the request data for create public key endpoint. 
diff --git a/pkg/api/requests/session.go b/pkg/api/requests/session.go index d74054ce35a..d97b468711f 100644 --- a/pkg/api/requests/session.go +++ b/pkg/api/requests/session.go @@ -1,11 +1,22 @@ package requests +import ( + "time" + + "github.com/shellhub-io/shellhub/pkg/api/query" +) + // SessionIDParam is a structure to represent and validate a session UID as path param. type SessionIDParam struct { // UID is the session's UID. UID string `param:"uid" validate:"required"` } +type ListSessions struct { + TenantID string `header:"X-Tenant-ID"` + query.Paginator +} + // SessionGet is the structure to represent the request data for get session endpoint. type SessionGet struct { SessionIDParam @@ -36,3 +47,22 @@ type SessionFinish struct { type SessionKeepAlive struct { SessionIDParam } + +type SessionUpdate struct { + SessionIDParam + Recorded *bool `json:"recorded"` + Authenticated *bool `json:"authenticated"` + Type *string `json:"type"` +} + +type SessionEvent struct { + Type string `json:"type" validate:"required"` + Timestamp time.Time `json:"timestamp" validate:"required"` + Data any `json:"data" validate:"required"` + Seat int `json:"seat" validate:"min=0"` +} + +type SessionSeat struct { + SessionIDParam + ID int `json:"id" bson:"id,omitempty"` +} diff --git a/pkg/api/requests/setup.go b/pkg/api/requests/setup.go index 6ee1b6408ed..8c0db11f4a5 100644 --- a/pkg/api/requests/setup.go +++ b/pkg/api/requests/setup.go @@ -1,9 +1,8 @@ package requests type Setup struct { - Email string `json:"email" validate:"required,email"` - Name string `json:"name" validate:"required"` - Username string `json:"username" validate:"required"` - Password string `json:"password" validate:"required,min=5,max=30"` - Namespace string `json:"namespace" validate:"required,hostname_rfc1123,excludes=."` + Email string `json:"email" validate:"required,email"` + Name string `json:"name" validate:"required,name"` + Username string `json:"username" validate:"required,username"` + Password 
string `json:"password" validate:"required,password"` } diff --git a/pkg/api/requests/stats.go b/pkg/api/requests/stats.go new file mode 100644 index 00000000000..e54be20316b --- /dev/null +++ b/pkg/api/requests/stats.go @@ -0,0 +1,5 @@ +package requests + +type GetStats struct { + TenantID string `header:"X-Tenant-ID"` +} diff --git a/pkg/api/requests/system.go b/pkg/api/requests/system.go index 9c019b31512..8d9c9dc9da9 100644 --- a/pkg/api/requests/system.go +++ b/pkg/api/requests/system.go @@ -1,6 +1,6 @@ package requests -type SystemGetInfo struct { +type GetSystemInfo struct { Host string `header:"X-Forwarded-Host"` Port int `header:"X-Forwarded-Port"` } diff --git a/pkg/api/requests/tags.go b/pkg/api/requests/tags.go index cfebddfb276..78e0434d43b 100644 --- a/pkg/api/requests/tags.go +++ b/pkg/api/requests/tags.go @@ -1,5 +1,49 @@ package requests +import "github.com/shellhub-io/shellhub/pkg/api/query" + +type CreateTag struct { + TenantID string `param:"tenant" header:"X-Tenant-ID" validate:"required,uuid"` + Name string `json:"name" validate:"required,min=3,max=255,alphanum,ascii,excludes=/@&:"` +} + +type PushTag struct { + TenantID string `param:"tenant" header:"X-Tenant-ID" validate:"required,uuid"` + Name string `param:"name" validate:"required,min=3,max=255,alphanum,ascii,excludes=/@&:"` + // TargetID is the identifier of the target to push the tag on. + // For the reason cannot of it can be a list of things (UID for device, ID for firewall, etc...), it + // cannot be parsed and must be set manually + TargetID string `validate:"required"` +} + +type PullTag struct { + TenantID string `param:"tenant" header:"X-Tenant-ID" validate:"required,uuid"` + Name string `param:"name" validate:"required,min=3,max=255,alphanum,ascii,excludes=/@&:"` + // TargetID is the identifier of the target to pull the tag of. 
+ // For the reason cannot of it can be a list of things (UID for device, ID for firewall, etc...), it + // cannot be parsed and must be set manually + TargetID string `validate:"required"` +} + +type ListTags struct { + TenantID string `param:"tenant" header:"X-Tenant-ID" validate:"required,uuid"` + query.Paginator + query.Filters + query.Sorter +} + +type UpdateTag struct { + TenantID string `param:"tenant" header:"X-Tenant-ID" validate:"required,uuid"` + Name string `param:"name" validate:"required"` + // Similar to [UpdateTag.Name], but is used to update the tag's name instead of retrieve the tag. + NewName string `json:"name" validate:"omitempty,min=3,max=255,alphanum,ascii,excludes=/@&:"` +} + +type DeleteTag struct { + TenantID string `param:"tenant" header:"X-Tenant-ID" validate:"required,uuid"` + Name string `param:"name" validate:"required"` +} + // TagParam is a structure to represent and validate a tag as path param. type TagParam struct { Tag string `param:"tag" validate:"required,min=3,max=255,alphanum,ascii,excludes=/@&:"` diff --git a/pkg/api/requests/user.go b/pkg/api/requests/user.go index fe220bca6a8..686d24c9253 100644 --- a/pkg/api/requests/user.go +++ b/pkg/api/requests/user.go @@ -1,16 +1,22 @@ // Package requests defines structures to represent requests' bodies from API. package requests +import "github.com/shellhub-io/shellhub/pkg/models" + type UserParam struct { ID string `param:"id" validate:"required"` } -// UserDataUpdate is the structure to represent the request body of the update user data endpoint. -type UserDataUpdate struct { - UserParam - Name string `json:"name" validate:"required,name"` - Username string `json:"username" validate:"required,username"` - Email string `json:"email" validate:"required,email"` +// UpdateUser is the structure to represent the request body of the update user data endpoint. 
+type UpdateUser struct { + UserID string `header:"X-ID" validate:"required"` + Name string `json:"name" validate:"omitempty,name"` + Username string `json:"username" validate:"omitempty,username"` + Email string `json:"email" validate:"omitempty,email"` + RecoveryEmail string `json:"recovery_email" validate:"omitempty,email"` + // Password is the new password. If not empty, [UserDataUpdate.CurrentPassword] must be the current user's password. + Password string `json:"password" validate:"omitempty,password"` + CurrentPassword string `json:"current_password"` } // UserPasswordUpdate is the structure to represent the request body for the update user password endpoint. @@ -20,8 +26,11 @@ type UserPasswordUpdate struct { NewPassword string `json:"new_password" validate:"required,password,nefield=CurrentPassword"` } -// UserAuth is the structure to represent the request body for the user auth endpoint. -type UserAuth struct { - Username string `json:"username" validate:"required"` - Password string `json:"password" validate:"required"` +// AuthLocalUser is the structure to represent the request body for the user auth endpoint. +type AuthLocalUser struct { + // Identifier represents an username or email. + // + // TODO: change json tag from username to identifier and update the OpenAPI. 
+ Identifier models.UserAuthIdentifier `json:"username" validate:"required"` + Password string `json:"password" validate:"required"` } diff --git a/pkg/api/responses/api-key.go b/pkg/api/responses/api-key.go new file mode 100644 index 00000000000..e2aad20da11 --- /dev/null +++ b/pkg/api/responses/api-key.go @@ -0,0 +1,32 @@ +package responses + +import ( + "time" + + "github.com/shellhub-io/shellhub/pkg/api/authorizer" + "github.com/shellhub-io/shellhub/pkg/models" +) + +type CreateAPIKey struct { + ID string `json:"id" bson:"_id"` + Name string `json:"name" bson:"name"` + UserID string `json:"user_id" bson:"user_id"` + TenantID string `json:"tenant_id" bson:"tenant_id"` + Role authorizer.Role `json:"role" bson:"role" validate:"required,oneof=administrator operator observer"` + CreatedAt time.Time `json:"created_at" bson:"created_at"` + UpdatedAt time.Time `json:"updated_at" bson:"updated_at"` + ExpiresIn int64 `json:"expires_in" bson:"expires_in"` +} + +func CreateAPIKeyFromModel(m *models.APIKey) *CreateAPIKey { + return &CreateAPIKey{ + ID: m.ID, + Name: m.Name, + UserID: m.CreatedBy, + TenantID: m.TenantID, + Role: m.Role, + CreatedAt: m.CreatedAt, + UpdatedAt: m.UpdatedAt, + ExpiresIn: m.ExpiresIn, + } +} diff --git a/pkg/cache/cache.go b/pkg/cache/cache.go index e62ba75812e..9a17e3d9b79 100644 --- a/pkg/cache/cache.go +++ b/pkg/cache/cache.go @@ -2,11 +2,73 @@ package cache import ( "context" + "errors" "time" ) +var ErrGetNotFound = errors.New("failed to find the value on the cache") + +func Get[T any](ctx context.Context, cache Cache, key string) (*T, error) { + var t *T + + if err := cache.Get(ctx, key, &t); err != nil { + return nil, err + } + + if t == nil { + return nil, ErrGetNotFound + } + + return t, nil +} + type Cache interface { Get(ctx context.Context, key string, value interface{}) error Set(ctx context.Context, key string, value interface{}, ttl time.Duration) error Delete(ctx context.Context, key string) error + + // HasAccountLockout reports 
whether the source is currently blocked from attempting to + // log in to a user with the specified userID. It returns the absolute Unix timestamp + // in seconds representing the end of the lockout, or 0 if no lockout was found; the + // attempt number and an error if any. + HasAccountLockout(ctx context.Context, source, userID string) (lockout int64, attempt int, err error) + + // StoreLoginAttempt stores a login attempt from source to the user with the specified userID. + // If the attempt number equals or exceeds 3, it sets a lockout for future login attempts. + // + // The lockout duration is calculated based on the number of attempts made, increasing exponentially + // by a factor of 4 after the third attempt. Attempts must last for half of the double lockout duration. + // + // This means that a user who was locked out for 4 minutes must have the attempts stored for 10 + // minutes (or 6 minutes after the timeout). Any wrong attempt within this time will increase the + // lockout once again. After this, the attempts will be reset, and new wrong attempts will start the + // attempt counter from 0. + // + // The following equations are used to calculate both lockout and attempt duration, with 'x' representing + // the lockout duration and 'y' the attempt duration: + // + // F(x) = min(4^(a - 3), M) + // F(y) = min(x * 2.5, M) + // + // Where: + // + // x is the lockout duration in minutes. + // y is the attempt duration in minutes. + // a is the attempt number. + // M is the maximum duration value, specified by the "SHELLHUB_MAXIMUM_ACCOUNT_LOCKOUT" environment variable. + // + // Examples for M = 32768 (15 days) and a = n: + // + // n = 3 | 4 | 5 | 8 | 11 + // _________________________________ + // F(x) = 1 | 4 | 16 | 1024 | 32768 + // F(y) = 3 | 10 | 40 | 2560 | 32768 + // + // It returns the absolute Unix timestamp in seconds representing the end of the lockout, or 0 if no + // lockout was found; the attempt number and an error if any. 
+ StoreLoginAttempt(ctx context.Context, source, userID string) (lockout int64, attempt int, err error) + + // ResetLoginAttempts resets the login attempts and associated lockout from the source to + // the user with the specified userID. + ResetLoginAttempts(ctx context.Context, source, userID string) error } diff --git a/pkg/cache/cache_null.go b/pkg/cache/cache_null.go index d12c9fe6963..ae3c4d4b97a 100644 --- a/pkg/cache/cache_null.go +++ b/pkg/cache/cache_null.go @@ -13,14 +13,26 @@ func NewNullCache() Cache { return &nullCache{} } -func (n *nullCache) Get(_ context.Context, _ string, _ interface{}) error { +func (*nullCache) Get(_ context.Context, _ string, _ interface{}) error { return nil } -func (n *nullCache) Set(_ context.Context, _ string, _ interface{}, _ time.Duration) error { +func (*nullCache) Set(_ context.Context, _ string, _ interface{}, _ time.Duration) error { return nil } -func (n *nullCache) Delete(_ context.Context, _ string) error { +func (*nullCache) Delete(_ context.Context, _ string) error { + return nil +} + +func (*nullCache) HasAccountLockout(_ context.Context, _, _ string) (int64, int, error) { + return 0, 0, nil +} + +func (*nullCache) StoreLoginAttempt(_ context.Context, _, _ string) (int64, int, error) { + return 0, 0, nil +} + +func (*nullCache) ResetLoginAttempts(_ context.Context, _, _ string) error { return nil } diff --git a/pkg/cache/cache_redis.go b/pkg/cache/cache_redis.go index 3ffc16be9ee..d4ea271c567 100644 --- a/pkg/cache/cache_redis.go +++ b/pkg/cache/cache_redis.go @@ -2,26 +2,41 @@ package cache import ( "context" + "math" + "strconv" "time" rediscache "github.com/go-redis/cache/v8" "github.com/go-redis/redis/v8" + "github.com/shellhub-io/shellhub/pkg/clock" + "github.com/shellhub-io/shellhub/pkg/envs" + log "github.com/sirupsen/logrus" ) type redisCache struct { cache *rediscache.Cache + cfg *config } var _ Cache = &redisCache{} -// NewRedisCache creates and returns a new redis cache. 
-func NewRedisCache(uri string) (Cache, error) { +func NewRedisCache(uri string, pool int) (Cache, error) { opt, err := redis.ParseURL(uri) if err != nil { return nil, err } + if pool > 0 { + opt.PoolSize = pool + } + + cfg, err := envs.ParseWithPrefix[config]("API_") + if err != nil { + log.WithError(err).Fatal("Failed to load environment variables") + } + return &redisCache{ + cfg: cfg, cache: rediscache.New(&rediscache.Options{ Redis: redis.NewClient(opt), }), @@ -52,3 +67,73 @@ func (c *redisCache) Delete(ctx context.Context, key string) error { return c.cache.Delete(ctx, key) } + +func (c *redisCache) HasAccountLockout(ctx context.Context, source, id string) (int64, int, error) { + if c.cfg.MaximumAccountLockout <= 0 { + return 0, 0, nil + } + + lockoutSTR := "0" + if err := c.Get(ctx, "account-lockout="+source+":"+id, &lockoutSTR); err != nil { + return 0, 0, err + } + + attemptSTR := "0" + if err := c.Get(ctx, "login-attempt="+source+":"+id, &attemptSTR); err != nil { + return 0, 0, err + } + + lockout, _ := strconv.ParseInt(lockoutSTR, 10, 0) + attempt, _ := strconv.Atoi(attemptSTR) + + return lockout, attempt, nil +} + +func (c *redisCache) StoreLoginAttempt(ctx context.Context, source, id string) (int64, int, error) { + if c.cfg.MaximumAccountLockout <= 0 { + return 0, 0, nil + } + + attemptSTR := "0" + if err := c.Get(ctx, "login-attempt="+source+":"+id, &attemptSTR); err != nil { + return 0, 0, err + } + + attempt, _ := strconv.Atoi(attemptSTR) + attempt++ + + now := clock.Now() + tmp := math.Min(math.Pow(4, float64(attempt-3)), float64(c.cfg.MaximumAccountLockout)) + + attemptTTL := time.Duration(math.Max(math.Min(tmp*2.5, float64(c.cfg.MaximumAccountLockout)), 2)) * time.Minute + if err := c.Set(ctx, "login-attempt="+source+":"+id, strconv.Itoa(attempt), attemptTTL); err != nil { + return 0, attempt, err + } + + if attempt <= 2 { + return 0, attempt, nil + } + + // We save 'lockoutTTL' as an absolute lockoutStr to help with time handling + lockoutTTL 
:= time.Duration(tmp) * time.Minute + lockoutSTR := strconv.FormatInt(now.Add(lockoutTTL).Unix(), 10) + if err := c.Set(ctx, "account-lockout="+source+":"+id, lockoutSTR, lockoutTTL); err != nil { + return 0, attempt, err + } + + lockout, _ := strconv.ParseInt(lockoutSTR, 10, 0) + + return lockout, attempt, nil +} + +func (c *redisCache) ResetLoginAttempts(ctx context.Context, source, id string) error { + if c.cfg.MaximumAccountLockout <= 0 { + return nil + } + + if err := c.Delete(ctx, "login-attempt="+source+":"+id); err != nil { + return err + } + + return c.Delete(ctx, "account-lockout="+source+":"+id) +} diff --git a/pkg/cache/cache_test.go b/pkg/cache/cache_test.go new file mode 100644 index 00000000000..c306e050e24 --- /dev/null +++ b/pkg/cache/cache_test.go @@ -0,0 +1,89 @@ +package cache + +import ( + "context" + "errors" + "testing" + + cacheMock "github.com/shellhub-io/shellhub/pkg/cache/mocks" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +func TestGet(t *testing.T) { + type Test struct { + Value bool + } + + cache := new(cacheMock.Cache) + + type Expected struct { + value *Test + err error + } + + tests := []struct { + description string + key string + setup func() + expected Expected + }{ + { + description: "fail to get the cache due a error", + key: "fail_with_error", + setup: func() { + cache.On("Get", mock.Anything, "fail_with_error", mock.Anything). + Return(errors.New("failed with error")). + Once() + }, + expected: Expected{nil, errors.New("failed with error")}, + }, + { + description: "fail to find this vaue from the cache", + key: "fail_when_not_found", + setup: func() { + cache.On("Get", mock.Anything, "fail_when_not_found", mock.Anything). + Return(nil). + Run(func(args mock.Arguments) { + value := args.Get(2).(**Test) + (*value) = nil + }). 
+ Once() + }, + expected: Expected{nil, ErrGetNotFound}, + }, + { + description: "success to get data from cache", + key: "success", + setup: func() { + cache.On("Get", mock.Anything, "success", mock.Anything). + Return(nil). + Run(func(args mock.Arguments) { + value := args.Get(2).(**Test) + (*value) = &Test{ + Value: true, + } + }).Once() + }, + expected: Expected{&Test{ + Value: true, + }, nil}, + }, + } + + for _, test := range tests { + t.Run(test.description, func(t *testing.T) { + ctx := context.Background() + + test.setup() + + value, err := Get[Test](ctx, cache, test.key) + assert.Equal(t, test.expected, Expected{ + value: value, + err: err, + }) + }) + } + + cache.AssertExpectations(t) +} diff --git a/pkg/cache/config.go b/pkg/cache/config.go new file mode 100644 index 00000000000..a0a0dd12d42 --- /dev/null +++ b/pkg/cache/config.go @@ -0,0 +1,7 @@ +package cache + +type config struct { + // Specifies the maximum duration in minutes for which a user can be blocked from login attempts. + // The default value is 60, equivalent to 1 hour. + MaximumAccountLockout int `env:"MAXIMUM_ACCOUNT_LOCKOUT,default=60"` +} diff --git a/pkg/cache/mocks/cache.go b/pkg/cache/mocks/cache.go new file mode 100644 index 00000000000..76da80be47a --- /dev/null +++ b/pkg/cache/mocks/cache.go @@ -0,0 +1,171 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. 
+ +package mocks + +import ( + context "context" + time "time" + + mock "github.com/stretchr/testify/mock" +) + +// Cache is an autogenerated mock type for the Cache type +type Cache struct { + mock.Mock +} + +// Delete provides a mock function with given fields: ctx, key +func (_m *Cache) Delete(ctx context.Context, key string) error { + ret := _m.Called(ctx, key) + + if len(ret) == 0 { + panic("no return value specified for Delete") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { + r0 = rf(ctx, key) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Get provides a mock function with given fields: ctx, key, value +func (_m *Cache) Get(ctx context.Context, key string, value interface{}) error { + ret := _m.Called(ctx, key, value) + + if len(ret) == 0 { + panic("no return value specified for Get") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, string, interface{}) error); ok { + r0 = rf(ctx, key, value) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// HasAccountLockout provides a mock function with given fields: ctx, source, userID +func (_m *Cache) HasAccountLockout(ctx context.Context, source string, userID string) (int64, int, error) { + ret := _m.Called(ctx, source, userID) + + if len(ret) == 0 { + panic("no return value specified for HasAccountLockout") + } + + var r0 int64 + var r1 int + var r2 error + if rf, ok := ret.Get(0).(func(context.Context, string, string) (int64, int, error)); ok { + return rf(ctx, source, userID) + } + if rf, ok := ret.Get(0).(func(context.Context, string, string) int64); ok { + r0 = rf(ctx, source, userID) + } else { + r0 = ret.Get(0).(int64) + } + + if rf, ok := ret.Get(1).(func(context.Context, string, string) int); ok { + r1 = rf(ctx, source, userID) + } else { + r1 = ret.Get(1).(int) + } + + if rf, ok := ret.Get(2).(func(context.Context, string, string) error); ok { + r2 = rf(ctx, source, userID) + } else { + r2 = ret.Error(2) + } + + return r0, 
r1, r2 +} + +// ResetLoginAttempts provides a mock function with given fields: ctx, source, userID +func (_m *Cache) ResetLoginAttempts(ctx context.Context, source string, userID string) error { + ret := _m.Called(ctx, source, userID) + + if len(ret) == 0 { + panic("no return value specified for ResetLoginAttempts") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, string, string) error); ok { + r0 = rf(ctx, source, userID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Set provides a mock function with given fields: ctx, key, value, ttl +func (_m *Cache) Set(ctx context.Context, key string, value interface{}, ttl time.Duration) error { + ret := _m.Called(ctx, key, value, ttl) + + if len(ret) == 0 { + panic("no return value specified for Set") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, string, interface{}, time.Duration) error); ok { + r0 = rf(ctx, key, value, ttl) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// StoreLoginAttempt provides a mock function with given fields: ctx, source, userID +func (_m *Cache) StoreLoginAttempt(ctx context.Context, source string, userID string) (int64, int, error) { + ret := _m.Called(ctx, source, userID) + + if len(ret) == 0 { + panic("no return value specified for StoreLoginAttempt") + } + + var r0 int64 + var r1 int + var r2 error + if rf, ok := ret.Get(0).(func(context.Context, string, string) (int64, int, error)); ok { + return rf(ctx, source, userID) + } + if rf, ok := ret.Get(0).(func(context.Context, string, string) int64); ok { + r0 = rf(ctx, source, userID) + } else { + r0 = ret.Get(0).(int64) + } + + if rf, ok := ret.Get(1).(func(context.Context, string, string) int); ok { + r1 = rf(ctx, source, userID) + } else { + r1 = ret.Get(1).(int) + } + + if rf, ok := ret.Get(2).(func(context.Context, string, string) error); ok { + r2 = rf(ctx, source, userID) + } else { + r2 = ret.Error(2) + } + + return r0, r1, r2 +} + +// NewCache creates a new instance of 
Cache. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewCache(t interface { + mock.TestingT + Cleanup(func()) +}) *Cache { + mock := &Cache{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/pkg/connman/connman.go b/pkg/connman/connman.go deleted file mode 100644 index 2c575a83d27..00000000000 --- a/pkg/connman/connman.go +++ /dev/null @@ -1,77 +0,0 @@ -package connman - -import ( - "context" - "errors" - "net" - - "github.com/shellhub-io/shellhub/pkg/revdial" - "github.com/shellhub-io/shellhub/pkg/wsconnadapter" - "github.com/sirupsen/logrus" -) - -var ErrNoConnection = errors.New("no connection") - -type ConnectionManager struct { - dialers *SyncSliceMap - DialerDoneCallback func(string, *revdial.Dialer) - DialerKeepAliveCallback func(string, *revdial.Dialer) -} - -func New() *ConnectionManager { - return &ConnectionManager{ - dialers: &SyncSliceMap{}, - DialerDoneCallback: func(string, *revdial.Dialer) { - }, - } -} - -func (m *ConnectionManager) Set(key string, conn *wsconnadapter.Adapter) { - dialer := revdial.NewDialer(conn, "/ssh/revdial") - - m.dialers.Store(key, dialer) - - if size := m.dialers.Size(key); size > 1 { - logrus.WithFields(logrus.Fields{ - "key": key, - "size": size, - }).Warning("Multiple connections stored for the same identifier.") - } - - m.DialerKeepAliveCallback(key, dialer) - - // Start the ping loop and get the channel for pong responses - pong := conn.Ping() - - go func() { - for { - select { - case <-pong: - m.DialerKeepAliveCallback(key, dialer) - - continue - case <-dialer.Done(): - m.dialers.Delete(key, dialer) - m.DialerDoneCallback(key, dialer) - - return - } - } - }() -} - -func (m *ConnectionManager) Dial(ctx context.Context, key string) (net.Conn, error) { - dialer, ok := m.dialers.Load(key) - if !ok { - return nil, ErrNoConnection - } - - if size := 
m.dialers.Size(key); size > 1 { - logrus.WithFields(logrus.Fields{ - "key": key, - "size": size, - }).Warning("Multiple connections found for the same identifier during reverse tunnel dialing.") - } - - return dialer.(*revdial.Dialer).Dial(ctx) -} diff --git a/pkg/envs/envs.go b/pkg/envs/envs.go index b6c7260a027..620f4d346a1 100644 --- a/pkg/envs/envs.go +++ b/pkg/envs/envs.go @@ -42,11 +42,6 @@ func IsDevelopment() bool { return DefaultBackend.Get("SHELLHUB_ENV") == "development" } -// HasBilling returns true if the current ShellHub server instance has billing feature enabled. -func HasBilling() bool { - return DefaultBackend.Get("SHELLHUB_BILLING") == ENABLED -} - var ErrParseWithPrefix = errors.New("failed to parse environment variables for the given prefix") // ParseWithPrefix parses the environment variables for the a given prefix. diff --git a/pkg/geoip/geolite2.go b/pkg/geoip/geolite2.go deleted file mode 100644 index 90209603e85..00000000000 --- a/pkg/geoip/geolite2.go +++ /dev/null @@ -1,183 +0,0 @@ -// Package geoip helps in geolocation operations. -package geoip - -import ( - "errors" - "fmt" - "io" - "io/fs" - "net" - "net/http" - "os" - "path/filepath" - "regexp" - - archiver "github.com/mholt/archiver/v3" - geoip2 "github.com/oschwald/geoip2-golang" -) - -// dbPath is the default path for Database. -var dbPath = "/usr/share/GeoIP/" - -const ( - // city is used to access DB's connection to GeoLite2-City. - city = iota - // country is used to access DB's connection to GeoLite2-Country. - country -) - -// geoLite2Info contains data about which geoLite2's databases are used. -var geoLite2Info = []map[string]string{ - {"type": "City", "file": "GeoLite2-City.mmdb"}, - {"type": "Country", "file": "GeoLite2-Country.mmdb"}, -} - -// Check if geoLite2 implements Locator interface. -var _ Locator = (*geoLite2)(nil) - -// Check if geoLite2 implements io.Closer interface. 
-var _ io.Closer = (*geoLite2)(nil) - -// geoLite2 is a structure what stores a geoIp2Reader to a GeoIp2 database. -type geoLite2 struct { - db []*geoip2.Reader -} - -// downloadGeoLite2Db downloads the GeoLite2 databases and extract the files into the dbPath. -func downloadGeoLite2Db(maxmindDBLicense, maxmindDBType string) error { - // Download the GeoLite2Db .tar.gz file with the database inside it. - r, err := http.Get(fmt.Sprintf("https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-%s&license_key=%s&suffix=tar.gz", maxmindDBType, maxmindDBLicense)) - if err != nil { - return err - } - - // Create a temporary directory to untar downloaded .tar.gz with database. - tempDir, err := os.MkdirTemp("", "geoip") - if err != nil { - return err - } - - // Delete temporary directory. - defer func(tempDir string) { - err := os.RemoveAll(tempDir) - if err != nil { - return - } - }(tempDir) - if err != nil { - return errors.New("unable to create temporary directory to download GeoLite2 database") - } - - // Create a temporary file to store downloaded .tar.gz with database. - tempFile, err := os.CreateTemp("", "geoip*.tar.gz") - // Delete temporary file. - defer func(tempFile *os.File) { - err := os.Remove(tempFile.Name()) - if err != nil { - return - } - }(tempFile) - if err != nil { - return err - } - - // Copy bytes from downloaded file to temporary file. - _, err = io.Copy(tempFile, r.Body) - if err != nil { - return err - } - - // Untar the downloaded file to the temporary directory. - err = archiver.Unarchive(tempFile.Name(), tempDir) - if err != nil { - return err - } - - // Create the path to move decompressed database file. - err = os.MkdirAll(dbPath, 0o755) - if err != nil { - return err - } - // Find geoip.geoLite2DbName inside the tempDir. 
- err = filepath.Walk(tempDir, func(p string, i fs.FileInfo, err error) error { - if ok, _ := regexp.MatchString("GeoLite2-([a-zA-z]+)\\.mmdb", i.Name()); ok { - // Move from temporary directory to geoip.geoLite2DbName to geoip.dbPath. - err := os.Rename(p, dbPath+i.Name()) - if err != nil { - return err - } - } - - return nil - }) - if err != nil { - return err - } - - return nil -} - -// NewGeoLite2 opens connections to GeoIp2 databases and return a geoLite2 structure with the databases connections. -// -// The connection uses the local database or try to download it from MaxMind's server (to download, it is required `MAXMIND_LICENSE` set). -func NewGeoLite2() (Locator, error) { - for _, info := range geoLite2Info { - if _, err := os.Stat(dbPath + info["file"]); os.IsNotExist(err) { - if license, ok := os.LookupEnv("MAXMIND_LICENSE"); ok { - err := downloadGeoLite2Db(license, info["type"]) - if err != nil { - return nil, err - } - } else { - return nil, errors.New("geoip feature is enable, but MAXMIND_LICENSE is not set") - } - } - } - - geolite2Db := new(geoLite2) - for _, info := range geoLite2Info { - db, err := geoip2.Open(dbPath + info["file"]) - if err != nil { - return nil, err - } - - geolite2Db.db = append(geolite2Db.db, db) - } - - return geolite2Db, nil -} - -// Close the connection with the GeoLite2 database, returning either error or nil. -func (g *geoLite2) Close() error { - for i := range geoLite2Info { - err := g.db[i].Close() - if err != nil { - return err - } - } - - return nil -} - -// GetCountry gets an ip and return either an ISO 3166-1 code to a country or an empty string. -func (g *geoLite2) GetCountry(ip net.IP) (string, error) { - record, err := g.db[country].Country(ip) - if err != nil { - return "", err - } - - return record.Country.IsoCode, nil -} - -// GetPosition gets an ip and return a Position structure with Longitude and Latitude with error nil or an empty Position structure with the error. 
-func (g *geoLite2) GetPosition(ip net.IP) (Position, error) {
-	record, err := g.db[city].City(ip)
-	if err != nil {
-		return Position{}, err
-	}
-
-	return Position{
-		Longitude: record.Location.Longitude,
-		Latitude:  record.Location.Latitude,
-	}, nil
-}
diff --git a/pkg/geoip/geolite2/fetcher.go b/pkg/geoip/geolite2/fetcher.go
new file mode 100644
index 00000000000..053bc368da5
--- /dev/null
+++ b/pkg/geoip/geolite2/fetcher.go
@@ -0,0 +1,73 @@
+package geolite2
+
+import (
+	"context"
+	"errors"
+	"io/fs"
+	"net/url"
+	"os"
+	"path/filepath"
+)
+
+// GeoliteFetcher defines a function type for asynchronously fetching and downloading GeoIP databases.
+type GeoliteFetcher func(context.Context) error
+
+// FetchFromLicenseKey initializes a GeoliteFetcher that downloads the GeoLite2 database files
+// from MaxMind, using the provided license key to authenticate.
+func FetchFromLicenseKey(licenseKey string) GeoliteFetcher {
+	return func(ctx context.Context) error {
+		urls := []string{}
+		for _, id := range []string{dbCountryID, dbCityID} {
+			_, err := os.Stat(filepath.Join(dbPath, id+dbExtension))
+			switch {
+			case errors.Is(err, fs.ErrNotExist):
+				query := url.Values{}
+				query.Add("suffix", "tar.gz")
+				query.Add("license_key", licenseKey)
+				query.Add("edition_id", id)
+
+				urls = append(urls, "https://download.maxmind.com/app/geoip_download?"+query.Encode())
+			case err != nil:
+				return err
+			}
+		}
+
+		if len(urls) > 0 {
+			if err := fetchDBs(ctx, urls); err != nil {
+				return err
+			}
+		}
+
+		return nil
+	}
+}
+
+func FetchFromMirror(mirror string) GeoliteFetcher {
+	return func(ctx context.Context) error {
+		urls := []string{}
+		for _, id := range []string{dbCountryID, dbCityID} {
+			_, err := os.Stat(filepath.Join(dbPath, id+dbExtension))
+			switch {
+			case errors.Is(err, fs.ErrNotExist):
+				u, err := url.Parse(mirror)
+				if err != nil {
+					return err
+				}
+
+				u.Path = "/" + id + ".tar.gz"
+
+				urls = append(urls, u.String())
+			case err != nil:
+				return err
+			}
+		}
+
+		if len(urls) > 0 {
+			if
err := fetchDBs(ctx, urls); err != nil { + return err + } + } + + return nil + } +} diff --git a/pkg/geoip/geolite2/locator.go b/pkg/geoip/geolite2/locator.go new file mode 100644 index 00000000000..02627a5aada --- /dev/null +++ b/pkg/geoip/geolite2/locator.go @@ -0,0 +1,90 @@ +package geolite2 + +import ( + "context" + "io" + "net" + "path/filepath" + + "github.com/oschwald/geoip2-golang" + "github.com/shellhub-io/shellhub/pkg/geoip" +) + +const ( + dbPath = "/usr/share/GeoIP/" // Directory where the GeoIP database files are stored. + dbCountryID = "GeoLite2-Country" // GeoLite2 country database filename without extension. + dbCityID = "GeoLite2-City" // GeoLite2 city database filename without extension. + dbExtension = ".mmdb" // Database file extension. +) + +type geoLite2 struct { + countryDB *geoip2.Reader + cityDB *geoip2.Reader +} + +// Check if geoLite2 implements the geoip.Locator and io.Closer interfaces. +var ( + _ geoip.Locator = (*geoLite2)(nil) + _ io.Closer = (*geoLite2)(nil) +) + +// NewLocator initializes a new geoip.Locator by setting up access to the GeoIP databases. +// If the databases do not exist locally, they will be downloaded using the provided fetcher method. 
+func NewLocator(ctx context.Context, fetcher GeoliteFetcher) (geoip.Locator, error) { + if err := ensureDatabasePath(); err != nil { + return nil, err + } + + if err := fetcher(ctx); err != nil { + return nil, err + } + + countryDB, err := geoip2.Open(filepath.Join(dbPath, dbCountryID+dbExtension)) + if err != nil { + return nil, err + } + + cityDB, err := geoip2.Open(filepath.Join(dbPath, dbCityID+dbExtension)) + if err != nil { + countryDB.Close() + + return nil, err + } + + return &geoLite2{countryDB: countryDB, cityDB: cityDB}, nil +} + +func (g *geoLite2) GetCountry(ip net.IP) (string, error) { + record, err := g.countryDB.Country(ip) + if err != nil { + return "", err + } + + return record.Country.IsoCode, nil +} + +func (g *geoLite2) GetPosition(ip net.IP) (geoip.Position, error) { + record, err := g.cityDB.City(ip) + if err != nil { + return geoip.Position{}, err + } + + pos := geoip.Position{ + Longitude: record.Location.Longitude, + Latitude: record.Location.Latitude, + } + + return pos, nil +} + +func (g *geoLite2) Close() error { + if err := g.countryDB.Close(); err != nil { + return err + } + + if err := g.cityDB.Close(); err != nil { //nolint:revive + return err + } + + return nil +} diff --git a/pkg/geoip/geolite2/utils.go b/pkg/geoip/geolite2/utils.go new file mode 100644 index 00000000000..f33c4aa6634 --- /dev/null +++ b/pkg/geoip/geolite2/utils.go @@ -0,0 +1,86 @@ +package geolite2 + +import ( + "bytes" + "context" + "errors" + "io" + "net/http" + "os" + "path/filepath" + "strings" + + "github.com/go-resty/resty/v2" + "github.com/mholt/archiver/v4" + "golang.org/x/sync/errgroup" +) + +// ensureDatabasePath ensures that [dbPath] exists and creates if not. +func ensureDatabasePath() error { + if err := os.MkdirAll(dbPath, 0o755); err != nil { + return errors.New("failed to create dbPath: " + err.Error()) + } + + return nil +} + +// fetchDBs concurrently downloads the GeoIP database files from the provided URLs and extracts +// them to [dbPath]. 
It will halt and return an error if any download or extraction fails.
+func fetchDBs(ctx context.Context, urls []string) error {
+	g, ctx := errgroup.WithContext(ctx)
+
+	for _, url := range urls {
+		g.Go(fetchDB(ctx, url))
+	}
+
+	return g.Wait()
+}
+
+// fetchDB downloads the GeoLite database file from the provided URL and extracts it to [dbPath].
+func fetchDB(ctx context.Context, url string) func() error {
+	return func() error {
+		r, err := resty.New().R().Get(url)
+		if err != nil {
+			return err
+		}
+
+		if r.StatusCode() != http.StatusOK {
+			return errors.New("cannot download geolite db: status " + r.Status())
+		}
+
+		format := archiver.CompressedArchive{Compression: archiver.Gz{}, Archival: archiver.Tar{}}
+		if err := format.Extract(ctx, bytes.NewReader(r.Body()), nil, saveDB()); err != nil { //nolint:revive
+			return err
+		}
+
+		return nil
+	}
+}
+
+// saveDB saves extracted GeoLite2 database files to [dbPath].
+// Only files with the expected [dbExtension] will be saved.
+func saveDB() archiver.FileHandler {
+	return func(_ context.Context, f archiver.File) error {
+		if !strings.HasSuffix(f.Name(), dbExtension) {
+			return nil
+		}
+
+		srcFile, err := f.Open()
+		if err != nil {
+			return err
+		}
+		defer srcFile.Close()
+
+		destPath := filepath.Join(dbPath, f.Name())
+
+		outFile, err := os.Create(destPath)
+		if err != nil {
+			return err
+		}
+		defer outFile.Close()
+
+		_, err = io.Copy(outFile, srcFile)
+
+		return err
+	}
+}
diff --git a/pkg/geoip/locator.go b/pkg/geoip/locator.go
index 166fcaf2042..d1fd2093295 100644
--- a/pkg/geoip/locator.go
+++ b/pkg/geoip/locator.go
@@ -3,6 +3,11 @@ package geoip
 import "net"
 
 type Locator interface {
+	// GetCountry retrieves the ISO country code for a given IP address.
+	// Returns an error if the IP lookup fails.
 	GetCountry(ip net.IP) (string, error)
+
+	// GetPosition retrieves the geographical [Position] for a given IP
+	// address. Returns an error if the IP lookup fails.
GetPosition(ip net.IP) (Position, error) } diff --git a/pkg/hash/backend.go b/pkg/hash/backend.go new file mode 100644 index 00000000000..30074922add --- /dev/null +++ b/pkg/hash/backend.go @@ -0,0 +1,32 @@ +package hash + +import ( + "crypto/sha256" + "encoding/hex" + "strings" + + "golang.org/x/crypto/bcrypt" +) + +type backend struct{} + +func (p *backend) Do(plain string) (string, error) { + minCost := 10 + + hash, err := bcrypt.GenerateFromPassword([]byte(plain), minCost) + if err != nil { + return "", err + } + + return string(hash), nil +} + +func (p *backend) CompareWith(plain string, hash string) bool { + if !strings.HasPrefix(hash, "$") { + sha := sha256.Sum256([]byte(plain)) + + return hash == hex.EncodeToString(sha[:]) + } + + return bcrypt.CompareHashAndPassword([]byte(hash), []byte(plain)) == nil +} diff --git a/pkg/hash/hash.go b/pkg/hash/hash.go new file mode 100644 index 00000000000..e37b27d4227 --- /dev/null +++ b/pkg/hash/hash.go @@ -0,0 +1,22 @@ +package hash + +type Hasher interface { + Do(plain string) (string, error) // Hash takes a string and returns its hash. + CompareWith(plain string, hash string) bool // Compare takes a plaintext password and a hashed password, then checks if they match. +} + +var Backend Hasher = &backend{} + +// Do takes a string and returns its hash. +func Do(pwd string) (string, error) { + return Backend.Do(pwd) +} + +// CompareWith reports whether a plain text matches with hash. +// +// For compatibility purposes, it can compare using both SHA256 and bcrypt algorithms. +// Hashes starting with "$" are assumed to be a bcrypt hash; otherwise, they are treated as +// SHA256 hashes. 
+func CompareWith(plain string, hash string) bool { + return Backend.CompareWith(plain, hash) +} diff --git a/pkg/hash/hash_test.go b/pkg/hash/hash_test.go new file mode 100644 index 00000000000..0ffe999f23d --- /dev/null +++ b/pkg/hash/hash_test.go @@ -0,0 +1,67 @@ +package hash + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestHash(t *testing.T) { + cases := []struct { + description string + password string + }{ + { + description: "succeeds when create a hash", + password: "secret", + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + hash, err := Do(tc.password) + assert.NoError(t, err) + assert.NotEqual(t, hash, "") + }) + } +} + +func TestCompare(t *testing.T) { + cases := []struct { + description string + password string + hash string + expected bool + }{ + { + description: "should fail when the password is incorrect and hashed using SHA256", + password: "invalid", + hash: "2bb80d537b1da3e38bd30361aa855686bde0eacd7162fef6a25fe97bf527a25b", + expected: false, + }, + { + description: "should succeed when the password is correct and hashed using SHA256", + password: "secret", + hash: "2bb80d537b1da3e38bd30361aa855686bde0eacd7162fef6a25fe97bf527a25b", + expected: true, + }, + { + description: "should fail when the password is incorrect and hashed using bcrypt", + password: "invalid", + hash: "$2a$14$QPfofG/FHXFaRMiMjIgo8uHgJSj/zghR9abxEO6JmBu/rViSDNo.K", + expected: false, + }, + { + description: "should succeed when the password is correct and hashed using bcrypt", + password: "secret", + hash: "$2a$14$QPfofG/FHXFaRMiMjIgo8uHgJSj/zghR9abxEO6JmBu/rViSDNo.K", + expected: true, + }, + } + + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + assert.Equal(t, tc.expected, CompareWith(tc.password, tc.hash)) + }) + } +} diff --git a/pkg/hash/mocks/hash.go b/pkg/hash/mocks/hash.go new file mode 100644 index 00000000000..912a8844547 --- /dev/null +++ b/pkg/hash/mocks/hash.go @@ -0,0 
+1,70 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. + +package mocks + +import mock "github.com/stretchr/testify/mock" + +// Hasher is an autogenerated mock type for the Hasher type +type Hasher struct { + mock.Mock +} + +// CompareWith provides a mock function with given fields: plain, _a1 +func (_m *Hasher) CompareWith(plain string, _a1 string) bool { + ret := _m.Called(plain, _a1) + + if len(ret) == 0 { + panic("no return value specified for CompareWith") + } + + var r0 bool + if rf, ok := ret.Get(0).(func(string, string) bool); ok { + r0 = rf(plain, _a1) + } else { + r0 = ret.Get(0).(bool) + } + + return r0 +} + +// Do provides a mock function with given fields: plain +func (_m *Hasher) Do(plain string) (string, error) { + ret := _m.Called(plain) + + if len(ret) == 0 { + panic("no return value specified for Do") + } + + var r0 string + var r1 error + if rf, ok := ret.Get(0).(func(string) (string, error)); ok { + return rf(plain) + } + if rf, ok := ret.Get(0).(func(string) string); ok { + r0 = rf(plain) + } else { + r0 = ret.Get(0).(string) + } + + if rf, ok := ret.Get(1).(func(string) error); ok { + r1 = rf(plain) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// NewHasher creates a new instance of Hasher. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewHasher(t interface { + mock.TestingT + Cleanup(func()) +}) *Hasher { + mock := &Hasher{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/pkg/httptunnel/httptunnel.go b/pkg/httptunnel/httptunnel.go deleted file mode 100644 index 001983868b9..00000000000 --- a/pkg/httptunnel/httptunnel.go +++ /dev/null @@ -1,127 +0,0 @@ -package httptunnel - -import ( - "bufio" - "context" - "io" - "net" - "net/http" - - "github.com/gorilla/websocket" - "github.com/labstack/echo/v4" - "github.com/shellhub-io/shellhub/pkg/connman" - "github.com/shellhub-io/shellhub/pkg/revdial" - "github.com/shellhub-io/shellhub/pkg/wsconnadapter" -) - -var upgrader = websocket.Upgrader{ - ReadBufferSize: 1024, - WriteBufferSize: 1024, - Subprotocols: []string{"binary"}, - CheckOrigin: func(r *http.Request) bool { - return true - }, -} - -const ( - DefaultConnectionURL = "/connection" - DefaultRevdialURL = "/revdial" -) - -type Tunnel struct { - ConnectionPath string - DialerPath string - ConnectionHandler func(*http.Request) (string, error) - CloseHandler func(string) - KeepAliveHandler func(string) - connman *connman.ConnectionManager - id chan string - online chan bool -} - -func NewTunnel(connectionPath, dialerPath string) *Tunnel { - tunnel := &Tunnel{ - ConnectionPath: connectionPath, - DialerPath: dialerPath, - ConnectionHandler: func(r *http.Request) (string, error) { - panic("ConnectionHandler not implemented") - }, - CloseHandler: func(string) { - }, - KeepAliveHandler: func(string) { - }, - connman: connman.New(), - id: make(chan string), - online: make(chan bool), - } - - tunnel.connman.DialerDoneCallback = func(id string, _ *revdial.Dialer) { - tunnel.CloseHandler(id) - } - - tunnel.connman.DialerKeepAliveCallback = func(id string, _ *revdial.Dialer) { - tunnel.KeepAliveHandler(id) - } - - return tunnel -} - -func (t *Tunnel) Router() http.Handler { - e := echo.New() - - e.GET(t.ConnectionPath, func(c echo.Context) error { 
- conn, err := upgrader.Upgrade(c.Response(), c.Request(), nil) - if err != nil { - return c.String(http.StatusInternalServerError, err.Error()) - } - - id, err := t.ConnectionHandler(c.Request()) - if err != nil { - conn.Close() - - return c.String(http.StatusBadRequest, err.Error()) - } - - t.connman.Set(id, wsconnadapter.New(conn)) - - return nil - }) - - e.GET(t.DialerPath, echo.WrapHandler(revdial.ConnHandler(upgrader))) - - return e -} - -func (t *Tunnel) Dial(ctx context.Context, id string) (net.Conn, error) { - return t.connman.Dial(ctx, id) -} - -func (t *Tunnel) SendRequest(ctx context.Context, id string, req *http.Request) (*http.Response, error) { - conn, err := t.connman.Dial(ctx, id) - if err != nil { - return nil, err - } - - if err := req.Write(conn); err != nil { - return nil, err - } - - resp, err := http.ReadResponse(bufio.NewReader(conn), req) - if err != nil { - return nil, err - } - - return resp, nil -} - -func (t *Tunnel) ForwardResponse(resp *http.Response, w http.ResponseWriter) { - for key, values := range resp.Header { - for _, value := range values { - w.Header().Add(key, value) - } - } - - w.WriteHeader(resp.StatusCode) - io.Copy(w, resp.Body) // nolint:errcheck - resp.Body.Close() -} diff --git a/pkg/loglevel/loglevel.go b/pkg/loglevel/loglevel.go index b68bbaa509a..8899b3cfc6d 100644 --- a/pkg/loglevel/loglevel.go +++ b/pkg/loglevel/loglevel.go @@ -5,19 +5,58 @@ import ( "github.com/sirupsen/logrus" ) -func SetLogLevel() { - level := logrus.InfoLevel +// UseEnvs set the logger level to the specified in SHELLHUB_LOG_LEVEL and the log format for SHELLHUB_LOG_FORMAT. +func UseEnvs() { + SetLogFormat() + SetLogLevel() +} +// TODO: "set" on the name doesn't make sense, as it isn't receiving nothing to set. In my view, "use" could be a better +// naming, as it will "use" the environmental variable. 
+func SetLogLevel() { if envs.DefaultBackend.Get("SHELLHUB_ENV") == "development" { - level = logrus.TraceLevel + logrus.SetLevel(logrus.TraceLevel) + logrus.Info("SHELLHUB_LOG_LEVEL set to TRACE due SHELLHUB_ENV in development") } - if env := envs.DefaultBackend.Get("SHELLHUB_LOG_LEVEL"); env != "" { - if v, err := logrus.ParseLevel(env); err == nil { - level = v + if level := envs.DefaultBackend.Get("SHELLHUB_LOG_LEVEL"); level != "" { + l, err := logrus.ParseLevel(level) + if err != nil { + logrus.SetLevel(logrus.InfoLevel) + logrus.WithField("SHELLHUB_LOG_LEVEL", logrus.InfoLevel).Error("using INFO as log level due SHELLHUB_LOG_LEVEL invalid value") + + return } + + logrus.SetLevel(l) + logrus.WithField("SHELLHUB_LOG_LEVEL", l).Info("using SHELLHUB_LOG_LEVEL") } +} - logrus.WithField("log_level", level.String()).Info("Setting log level") - logrus.SetLevel(level) +// LogFormat represents how logger should print logs. +type LogFormat string + +const ( + // LogFormatJSON format for JSON log. + LogFormatJSON LogFormat = "json" + // LogFormatText format for text log. + LogFormatText LogFormat = "text" +) + +// SetLogFormat sets the default format for the logger. +// TODO: "set" on the name doesn't make sense, as it isn't receiving nothing to set. In my view, "use" could be a better +// naming, as it will "use" the environmental variable. 
+func SetLogFormat() { + if format := envs.DefaultBackend.Get("SHELLHUB_LOG_FORMAT"); format != "" { + switch LogFormat(format) { + case LogFormatJSON: + logrus.SetFormatter(new(logrus.JSONFormatter)) + case LogFormatText: + logrus.SetFormatter(new(logrus.TextFormatter)) + default: + logrus.SetFormatter(new(logrus.TextFormatter)) + + logrus.WithField("SHELLHUB_LOG_FORMAT", format).Error("using text as log format due SHELLHUB_LOG_FORMAT invalid value") + } + } } diff --git a/pkg/loglevel/loglevel_test.go b/pkg/loglevel/loglevel_test.go index 47524a9b7f4..c0ee563a5ad 100644 --- a/pkg/loglevel/loglevel_test.go +++ b/pkg/loglevel/loglevel_test.go @@ -57,3 +57,55 @@ func TestSetLevels(t *testing.T) { }) } } + +func formatterToString(formatter logrus.Formatter) LogFormat { + switch formatter.(type) { + case *logrus.JSONFormatter: + return LogFormatJSON + case *logrus.TextFormatter: + return LogFormatText + default: + return LogFormatText + } +} + +func TestSetFormat(t *testing.T) { + mocks := &envMocks.Backend{} + envs.DefaultBackend = mocks + + cases := []struct { + description string + requiredMocks func() + expected LogFormat + }{ + { + description: "Set log format to json when SHELLHUB_LOG_FORMAT is set to json", + requiredMocks: func() { + mocks.On("Get", "SHELLHUB_LOG_FORMAT").Return("json").Once() + }, + expected: LogFormatJSON, + }, + { + description: "Set log format to text when SHELLHUB_LOG_FORMAT is set to text", + requiredMocks: func() { + mocks.On("Get", "SHELLHUB_LOG_FORMAT").Return("text").Once() + }, + expected: LogFormatText, + }, + { + description: "Set log format to text when SHELLHUB_LOG_FORMAT is invalid", + requiredMocks: func() { + mocks.On("Get", "SHELLHUB_LOG_FORMAT").Return("invalid").Once() + }, + expected: LogFormatText, + }, + } + for _, tc := range cases { + t.Run(tc.description, func(t *testing.T) { + tc.requiredMocks() + + SetLogFormat() + assert.Equal(t, tc.expected, formatterToString(logrus.StandardLogger().Formatter)) + }) + } +} diff 
--git a/pkg/middleware/log.go b/pkg/middleware/log.go index 1e955d75225..adae55fb169 100644 --- a/pkg/middleware/log.go +++ b/pkg/middleware/log.go @@ -1,48 +1,222 @@ package middleware import ( + "io" + "maps" "strconv" "time" echo "github.com/labstack/echo/v4" + "github.com/labstack/gommon/log" "github.com/sirupsen/logrus" ) -func Log(next echo.HandlerFunc) echo.HandlerFunc { - log := logrus.New() - log.SetFormatter(&logrus.JSONFormatter{}) +type Logger struct { + prefix string + logger *logrus.Entry +} - return func(c echo.Context) error { - level := logrus.InfoLevel +var _ echo.Logger = (*Logger)(nil) + +func NewEchoLogger(logger *logrus.Entry) echo.Logger { + return &Logger{ + prefix: "", + logger: logger, + } +} + +// Debug implements echo.Logger. +func (c *Logger) Debug(i ...any) { + c.logger.Debug(i...) +} + +// Debugf implements echo.Logger. +func (c *Logger) Debugf(format string, args ...any) { + c.logger.Debugf(format, args...) +} + +// Debugj implements echo.Logger. +func (c *Logger) Debugj(j log.JSON) { + m := make(logrus.Fields) + maps.Copy(m, j) + + c.logger.WithFields(m).Debug() +} + +// Info implements echo.Logger. +func (c *Logger) Info(i ...any) { + c.logger.Info(i...) +} + +// Infof implements echo.Logger. +func (c *Logger) Infof(format string, args ...any) { + c.logger.Infof(format, args...) +} + +// Print implements echo.Logger. +func (c *Logger) Print(i ...any) { + c.logger.Print(i...) +} + +// Printf implements echo.Logger. +func (c *Logger) Printf(format string, args ...any) { + c.logger.Printf(format, args...) +} + +// Printj implements echo.Logger. +func (c *Logger) Printj(j log.JSON) { + m := make(logrus.Fields) + maps.Copy(m, j) - // Assign request tracking ID to log entry - entry := logrus.NewEntry(log).WithFields(logrus.Fields{ - "id": c.Request().Header.Get(echo.HeaderXRequestID), - }) + c.logger.WithFields(m).Print() +} + +// Infoj implements echo.Logger. 
+func (c *Logger) Infoj(j log.JSON) { + m := make(logrus.Fields) + maps.Copy(m, j) + + c.logger.WithFields(m).Info() +} + +// Warn implements echo.Logger. +func (c *Logger) Warn(i ...any) { + c.logger.Warn(i...) +} + +// Warnf implements echo.Logger. +func (c *Logger) Warnf(format string, args ...any) { + c.logger.Warnf(format, args...) +} + +// Warnj implements echo.Logger. +func (c *Logger) Warnj(j log.JSON) { + m := make(logrus.Fields) + maps.Copy(m, j) + + c.logger.WithFields(m).Warn() +} + +// Error implements echo.Logger. +func (c *Logger) Error(i ...any) { + c.logger.Error(i...) +} + +// Errorf implements echo.Logger. +func (c *Logger) Errorf(format string, args ...any) { + c.logger.Errorf(format, args...) +} + +// Errorj implements echo.Logger. +func (c *Logger) Errorj(j log.JSON) { + m := make(logrus.Fields) + maps.Copy(m, j) + + c.logger.WithFields(m).Error() +} + +// Fatal implements echo.Logger. +func (c *Logger) Fatal(i ...any) { + c.logger.Fatal(i...) +} + +// Fatalf implements echo.Logger. +func (c *Logger) Fatalf(format string, args ...any) { + c.logger.Fatalf(format, args...) +} - // Set context log entry - c.Set("log", entry) +// Fatalj implements echo.Logger. +func (c *Logger) Fatalj(j log.JSON) { + m := make(logrus.Fields) + maps.Copy(m, j) + + c.logger.WithFields(m).Fatal() +} + +// Panic implements echo.Logger. +func (c *Logger) Panic(i ...any) { + c.logger.Panic(i...) +} + +// Panicf implements echo.Logger. +func (c *Logger) Panicf(format string, args ...any) { + c.logger.Panicf(format, args...) +} + +// Panicj implements echo.Logger. +func (c *Logger) Panicj(j log.JSON) { + m := make(logrus.Fields) + maps.Copy(m, j) + + c.logger.WithFields(m).Panic() +} + +// Level implements echo.Logger. +func (c *Logger) Level() log.Lvl { + // NOTE: It is safe to convert logrus.Level to int because logrus's max value is lower than uint8's max value. + return log.Lvl(int(c.logger.Level)) //nolint: gosec +} + +// SetLevel implements echo.Logger. 
+func (c *Logger) SetLevel(v log.Lvl) { + // NOTE: It is safe to convert log.Lvl to int because logrus's max value is lower than uint8's max value. + c.logger.Level = logrus.Level(int(v)) //nolint: gosec +} + +// Output implements echo.Logger. +func (c *Logger) Output() io.Writer { + return c.logger.Logger.Out +} + +// SetOutput implements echo.Logger. +func (c *Logger) SetOutput(w io.Writer) { + c.logger.Logger.Out = w +} + +// Prefix implements echo.Logger. +func (c *Logger) Prefix() string { + return c.prefix +} + +// SetPrefix implements echo.Logger. +func (c *Logger) SetPrefix(p string) { + c.prefix = p +} + +// SetHeader implements echo.Logger. +func (c *Logger) SetHeader(h string) { + panic("unimplemented") +} + +const ( + // HeaderUserID is the HTTP header where the user ID is stored. + HeaderUserID = "X-ID" + // HeaderTenantID is the HTTP header where the tenant ID is stored. + HeaderTenantID = "X-Tenant-ID" +) + +func Log(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + logger := c.Logger() - // Measure request execution time start := time.Now() - err := next(c) - elapsed := time.Since(start) - // Append error fields to log entry if request has returned an error + // NOTE: The next must be called to proceed to the next handler in the chain that should be the processing of + // the request itself. 
+ err := next(c) if err != nil { - level = logrus.ErrorLevel - entry = entry.WithError(err) - c.Error(err) } + elapsed := time.Since(start) + bytesIn := c.Request().Header.Get(echo.HeaderContentLength) if bytesIn == "" { bytesIn = "0" } - // Request finished log entry - entry.WithFields(logrus.Fields{ + fields := log.JSON{ + "id": c.Request().Header.Get(echo.HeaderXRequestID), "remote_ip": c.RealIP(), "host": c.Request().Host, "uri": c.Request().RequestURI, @@ -53,7 +227,25 @@ func Log(next echo.HandlerFunc) echo.HandlerFunc { "latency_human": elapsed.String(), "bytes_in": bytesIn, "bytes_out": strconv.FormatInt(c.Response().Size, 10), - }).Log(level) + } + + uid := c.Request().Header.Get(HeaderUserID) + if uid != "" { + fields["user"] = uid + } + + tenant := c.Request().Header.Get(HeaderTenantID) + if tenant != "" { + fields["tenant"] = tenant + } + + if err != nil { + fields["error"] = err.Error() + + logger.Errorj(fields) + } else { + logger.Infoj(fields) + } return nil } diff --git a/pkg/mocks/doc.go b/pkg/mocks/doc.go deleted file mode 100644 index 5e72005a5c6..00000000000 --- a/pkg/mocks/doc.go +++ /dev/null @@ -1,3 +0,0 @@ -// Package mocks offers standard builders and a mockery interface for testing -// external interfaces. -package mocks diff --git a/pkg/mocks/io.go b/pkg/mocks/io.go deleted file mode 100644 index 6d28ff94f90..00000000000 --- a/pkg/mocks/io.go +++ /dev/null @@ -1,175 +0,0 @@ -package mocks - -import ( - "errors" - "io" -) - -// result captures a predefined read/write result. -type result struct { - n int // number of bytes to return on Read/Write - err error // error to return on Read/Write -} - -// ReadCloser is a mock implementation of io.ReadCloser that behaves based on a predefined -// pipeline of read results. It can also mimic reading actual content when provided. 
-type ReadCloser struct { - pipeline []result // a list of predefined read results - content []byte // actual content to read if provided - close func() error // custom close function to simulate closing behavior - closed bool // indicates if the Reader has been closed -} - -// NewReader initializes and returns a new Reader without any predefined behavior. -func NewReader() *ReadCloser { - return &ReadCloser{ - pipeline: []result{}, - content: nil, - closed: false, - close: func() error { - return nil - }, - } -} - -// WithContent sets the provided content for the Reader to return on its next Read. -// It will override any predefined pipeline. -func (r *ReadCloser) WithContent(content []byte) *ReadCloser { - r.content = content - r.pipeline = []result{} - - return r -} - -// OnRead appends a predefined read result to the pipeline. The result will be used -// in the order added when the Reader's Read method is called. -func (r *ReadCloser) OnRead(n int, err error) *ReadCloser { - r.pipeline = append(r.pipeline, result{n: n, err: err}) - - return r -} - -// OnClose sets a custom error return for the Close method. -func (r *ReadCloser) OnClose(err error) *ReadCloser { - r.close = func() error { - r.closed = true - - return err - } - - return r -} - -// EOF appends an EOF result to the pipeline, signaling the end of the data stream. -func (r *ReadCloser) EOF() *ReadCloser { - r.pipeline = append(r.pipeline, result{n: 0, err: io.EOF}) - - return r -} - -// Read reads data based on the predefined pipeline. If actual content is provided, -// it will read from that content instead of the pipeline. 
-func (r *ReadCloser) Read(p []byte) (int, error) { - if r.closed { - return 0, errors.New("reader is closed") - } - - if r.content != nil { - copy(p, r.content) - n := len(r.content) - r.content = nil - - return n, nil - } - - if len(r.pipeline) == 0 { - return 0, nil - } - - resp := r.pipeline[0] - r.pipeline = r.pipeline[1:] - - return resp.n, resp.err -} - -// Close closes the Reader, invoking any predefined closing behavior. -func (r *ReadCloser) Close() error { - return r.close() -} - -// WriteCloser is a mock implementation of io.WriteCloser that behaves based on a predefined -// pipeline of write results. -type WriteCloser struct { - pipeline []result // a list of predefined write results - content []byte // actual content that was written - close func() error // custom close function to simulate closing behavior - closed bool // indicates if the WriteCloser has been closed -} - -// NewWriteCloser initializes and returns a new Writer without any predefined behavior. -// It can also mimic writing actual content when provided. -func NewWriteCloser() *WriteCloser { - return &WriteCloser{ - pipeline: []result{}, - content: nil, - closed: false, - close: func() error { - return nil - }, - } -} - -// WithContent sets the provided content for the Writer. -func (wc *WriteCloser) WithContent(content []byte) *WriteCloser { - wc.content = content - wc.pipeline = []result{} - - return wc -} - -// OnWrite appends a predefined write result to the pipeline. The result will be used -// in the order added when the Writer's Write method is called. -func (wc *WriteCloser) OnWrite(n int, err error) *WriteCloser { - wc.pipeline = append(wc.pipeline, result{n: n, err: err}) - - return wc -} - -// OnClose sets a custom error return for the Close method. -func (wc *WriteCloser) OnClose(err error) *WriteCloser { - wc.close = func() error { - wc.closed = true - - return err - } - - return wc -} - -// Write writes data based on the predefined pipeline. 
It also stores the actual written content. -func (wc *WriteCloser) Write(p []byte) (int, error) { - if wc.closed { - return 0, errors.New("writecloser is closed") - } - - if wc.content != nil { - length := len(wc.content) - wc.content = append(wc.content, p[:length]...) - - return length, nil - } - - if len(wc.pipeline) == 0 { - return 0, nil - } - - resp := wc.pipeline[0] - wc.pipeline = wc.pipeline[1:] - - return resp.n, resp.err -} - -// Close closes the WriteCloser, invoking any predefined closing behavior. -func (wc *WriteCloser) Close() error { - return wc.close() -} diff --git a/pkg/models/announcement.go b/pkg/models/announcement.go deleted file mode 100644 index 93e7693b332..00000000000 --- a/pkg/models/announcement.go +++ /dev/null @@ -1,18 +0,0 @@ -package models - -import ( - "time" -) - -type AnnouncementShort struct { - UUID string `json:"uuid" bson:"uuid"` - Title string `json:"title" bson:"title"` - Date time.Time `json:"date" bson:"date"` -} - -type Announcement struct { - UUID string `json:"uuid" bson:"uuid"` - Title string `json:"title" bson:"title"` - Content string `json:"content" bson:"content"` - Date time.Time `json:"date" bson:"date"` -} diff --git a/pkg/models/api_key.go b/pkg/models/api_key.go new file mode 100644 index 00000000000..5d084df2717 --- /dev/null +++ b/pkg/models/api_key.go @@ -0,0 +1,53 @@ +package models + +import ( + "time" + + "github.com/shellhub-io/shellhub/pkg/api/authorizer" +) + +// APIKey is used to authenticate a request. It is similar to [UserAuthClaims] but only for +// namespace information, which means that user-related routes are blocked for use with api keys. +// The ID and key are never returned to the end user; the "external" identification must be made +// by name and tenant only. +// +// Expired keys cannot be used for authentication. Use [APIKey.IsValid] to verify its validity. +type APIKey struct { + // ID is the unique identifier of the API key. It is a SHA256 hash of a UUID. 
+ ID string `json:"-" bson:"_id"` + // Name is an external identifier for a given API key. It is not unique per document but + // is unique per tenant ID. + Name string `json:"name" bson:"name"` + // TenantID is the API key's namespace ID. + TenantID string `json:"tenant_id" bson:"tenant_id"` + // Role defines the permissions of the API key. It must be equal to or less than the creator's role. + Role authorizer.Role `json:"role" bson:"role" validate:"required,oneof=administrator operator observer"` + // CreatedBy is the ID of the user who created the API key. + CreatedBy string `json:"created_by" bson:"created_by"` + // CreatedAt is the creation date of the API key. + CreatedAt time.Time `json:"created_at" bson:"created_at"` + // UpdatedAt is the last update date of the API key. + UpdatedAt time.Time `json:"updated_at" bson:"updated_at"` + // ExpiresIn is the expiration date of the API key. An expired key cannot be used for + // authentication. When equals or less than 0 it means that are no expiration date. + ExpiresIn int64 `json:"expires_in" bson:"expires_in"` +} + +// IsValid reports whether an API key is valid or not. +func (a *APIKey) IsValid() bool { + if a.ExpiresIn <= 0 { + return true + } + + now := time.Unix(time.Now().Unix(), 0) + expiresIn := time.Unix(a.ExpiresIn, 0) + + return now.Before(expiresIn) +} + +// APIKeyConflicts holds API keys attributes that must be unique for each item (per tenant ID) and can be utilized in queries +// to identify conflicts. 
+type APIKeyConflicts struct { + ID string `bson:"_id"` + Name string `bson:"name"` +} diff --git a/pkg/models/device.go b/pkg/models/device.go index b14d10fe582..063fcb42a5d 100644 --- a/pkg/models/device.go +++ b/pkg/models/device.go @@ -2,8 +2,6 @@ package models import ( "time" - - jwt "github.com/golang-jwt/jwt/v4" ) type DeviceStatus string @@ -14,39 +12,44 @@ const ( DeviceStatusRejected DeviceStatus = "rejected" DeviceStatusRemoved DeviceStatus = "removed" DeviceStatusUnused DeviceStatus = "unused" + DeviceStatusEmpty DeviceStatus = "" ) type Device struct { // UID is the unique identifier for a device. - UID string `json:"uid"` - Name string `json:"name" bson:"name,omitempty" validate:"required,device_name"` - Identity *DeviceIdentity `json:"identity"` - Info *DeviceInfo `json:"info"` - PublicKey string `json:"public_key" bson:"public_key"` - TenantID string `json:"tenant_id" bson:"tenant_id"` - LastSeen time.Time `json:"last_seen" bson:"last_seen"` - Online bool `json:"online" bson:",omitempty"` - Namespace string `json:"namespace" bson:",omitempty"` - Status DeviceStatus `json:"status" bson:"status,omitempty" validate:"oneof=accepted rejected pending unused"` - StatusUpdatedAt time.Time `json:"status_updated_at" bson:"status_updated_at,omitempty"` - CreatedAt time.Time `json:"created_at" bson:"created_at,omitempty"` - RemoteAddr string `json:"remote_addr" bson:"remote_addr"` - Position *DevicePosition `json:"position" bson:"position"` - Tags []string `json:"tags" bson:"tags,omitempty"` - PublicURL bool `json:"public_url" bson:"public_url,omitempty"` - PublicURLAddress string `json:"public_url_address" bson:"public_url_address,omitempty"` - Acceptable bool `json:"acceptable" bson:"acceptable,omitempty"` -} - -type DeviceAuthClaims struct { UID string `json:"uid"` - AuthClaims `mapstruct:",squash"` - jwt.RegisteredClaims `mapstruct:",squash"` -} + CreatedAt time.Time `json:"created_at" bson:"created_at,omitempty"` + RemovedAt *time.Time `json:"removed_at" 
bson:"removed_at"` -func (d *DeviceAuthClaims) SetRegisteredClaims(claims jwt.RegisteredClaims) { - d.RegisteredClaims = claims + Name string `json:"name" bson:"name,omitempty" validate:"required,device_name"` + Identity *DeviceIdentity `json:"identity"` + Info *DeviceInfo `json:"info"` + PublicKey string `json:"public_key" bson:"public_key"` + TenantID string `json:"tenant_id" bson:"tenant_id"` + + // LastSeen represents the timestamp of the most recent ping from the device to the server. + LastSeen time.Time `json:"last_seen" bson:"last_seen"` + // DisconnectedAt stores the timestamp when the device disconnected from the server. + // When nil, it indicates the device is potentially online. + // + // Due to potential network issues, this field might be nil even when the device + // is actually offline. For reliable connection status, check both this and + // [Device.LastSeen] fields. + DisconnectedAt *time.Time `json:"-" bson:"disconnected_at"` + // Online indicates whether the device is currently connected. This field is not + // persisted to the database but is computed based on both [Device.LastSeen] and + // [Device.DisconnectedAt] fields to determine the current connection status. + Online bool `json:"online" bson:",omitempty"` + + Namespace string `json:"namespace" bson:",omitempty"` + Status DeviceStatus `json:"status" bson:"status,omitempty" validate:"oneof=accepted rejected pending unused"` + StatusUpdatedAt time.Time `json:"status_updated_at" bson:"status_updated_at,omitempty"` + RemoteAddr string `json:"remote_addr" bson:"remote_addr"` + Position *DevicePosition `json:"position" bson:"position"` + Acceptable bool `json:"acceptable" bson:"acceptable,omitempty"` + + Taggable `json:",inline" bson:",inline"` } type DeviceAuthRequest struct { @@ -67,6 +70,12 @@ type DeviceAuthResponse struct { Token string `json:"token"` Name string `json:"name"` Namespace string `json:"namespace"` + // Config holds device-specific configuration settings. 
+ // This can include various parameters that the device needs to operate correctly. + // The structure of this map can vary depending on the device type and its requirements. + // Example configurations might include network settings, operational modes, or feature toggles. + // It's designed to be flexible to accommodate different device needs. + Config map[string]any `json:"config,omitempty"` } type DeviceIdentity struct { @@ -81,23 +90,11 @@ type DeviceInfo struct { Platform string `json:"platform"` } -type ConnectedDevice struct { - UID string `json:"uid"` - TenantID string `json:"tenant_id" bson:"tenant_id"` - LastSeen time.Time `json:"last_seen" bson:"last_seen"` - Status string `json:"status" bson:"status"` -} - type DevicePosition struct { Latitude float64 `json:"latitude" bson:"latitude"` Longitude float64 `json:"longitude" bson:"longitude"` } -type DeviceRemoved struct { - Device *Device `json:"device" bson:"device"` - Timestamp time.Time `json:"timestamp" bson:"timestamp"` -} - type DeviceTag struct { Tag string `validate:"required,min=3,max=255,alphanum,ascii,excludes=/@&:"` } @@ -107,3 +104,16 @@ func NewDeviceTag(tag string) DeviceTag { Tag: tag, } } + +// DeviceConflicts holds user attributes that must be unique for each itam and can be utilized in queries +// to identify conflicts. +type DeviceConflicts struct { + Name string +} + +// Distinct removes the c's attributes whether it's equal to the device attribute. 
+func (c *DeviceConflicts) Distinct(device *Device) { + if c.Name == device.Name { + c.Name = "" + } +} diff --git a/pkg/models/license.go b/pkg/models/license.go deleted file mode 100644 index 81044674a43..00000000000 --- a/pkg/models/license.go +++ /dev/null @@ -1,8 +0,0 @@ -package models - -import "time" - -type License struct { - RawData []byte - CreatedAt time.Time `json:"created_at" bson:"created_at"` -} diff --git a/pkg/models/member.go b/pkg/models/member.go new file mode 100644 index 00000000000..77bea7908b0 --- /dev/null +++ b/pkg/models/member.go @@ -0,0 +1,14 @@ +package models + +import ( + "time" + + "github.com/shellhub-io/shellhub/pkg/api/authorizer" +) + +type Member struct { + ID string `json:"id,omitempty" bson:"id,omitempty"` + AddedAt time.Time `json:"added_at" bson:"added_at"` + Email string `json:"email" bson:"email,omitempty" validate:"email"` + Role authorizer.Role `json:"role" bson:"role" validate:"required,oneof=administrator operator observer"` +} diff --git a/pkg/models/membership-invitation.go b/pkg/models/membership-invitation.go new file mode 100644 index 00000000000..f5cdfb8e621 --- /dev/null +++ b/pkg/models/membership-invitation.go @@ -0,0 +1,44 @@ +package models + +import ( + "time" + + "github.com/shellhub-io/shellhub/pkg/api/authorizer" + "github.com/shellhub-io/shellhub/pkg/clock" +) + +type MembershipInvitationStatus string + +const ( + MembershipInvitationStatusPending MembershipInvitationStatus = "pending" + MembershipInvitationStatusAccepted MembershipInvitationStatus = "accepted" + MembershipInvitationStatusRejected MembershipInvitationStatus = "rejected" + MembershipInvitationStatusCancelled MembershipInvitationStatus = "cancelled" +) + +type MembershipInvitation struct { + ID string `json:"-" bson:"_id"` + TenantID string `json:"-" bson:"tenant_id"` + UserID string `json:"-" bson:"user_id"` + InvitedBy string `json:"invited_by" bson:"invited_by"` + CreatedAt time.Time `json:"created_at" bson:"created_at"` + UpdatedAt 
time.Time `json:"updated_at" bson:"updated_at"` + ExpiresAt *time.Time `json:"expires_at" bson:"expires_at"` + Status MembershipInvitationStatus `json:"status" bson:"status"` + StatusUpdatedAt time.Time `json:"status_updated_at" bson:"status_updated_at"` + Role authorizer.Role `json:"role" bson:"role"` + Invitations int `json:"-" bson:"invitations"` + + // NamespaceName isn't saved on the database + NamespaceName string `json:"-" bson:"namespace_name,omitempty"` + // UserEmail isn't saved on the database + UserEmail string `json:"-" bson:"user_email,omitempty"` +} + +func (m MembershipInvitation) IsExpired() bool { + return m.ExpiresAt != nil && m.ExpiresAt.Before(clock.Now()) +} + +func (m MembershipInvitation) IsPending() bool { + return m.Status == MembershipInvitationStatusPending +} diff --git a/pkg/models/mfa.go b/pkg/models/mfa.go deleted file mode 100644 index b3925212eaa..00000000000 --- a/pkg/models/mfa.go +++ /dev/null @@ -1,20 +0,0 @@ -package models - -type EnableMFA struct { - TokenMFA string `json:"token_mfa"` - Secret string `json:"secret"` - Codes []string `json:"codes" bson:"codes"` -} - -type MFA struct { - Enable bool `json:"enable"` - Validate bool `json:"validate"` -} - -type GetCodes struct { - Codes []string `json:"codes" bson:"codes"` -} - -type Code struct { - Code string `json:"code"` -} diff --git a/pkg/models/mqtt.go b/pkg/models/mqtt.go deleted file mode 100644 index ab521a764fb..00000000000 --- a/pkg/models/mqtt.go +++ /dev/null @@ -1,30 +0,0 @@ -package models - -const ( - MqttClientConnectedEventType = "client_connected" - MqttClientDisconnectedEventType = "client_disconnected" -) - -type MqttEvent struct { - Action string `json:"action"` - - MqttClientEvent -} - -type MqttClientEvent struct { - ClientID string `json:"client_id"` - Username string `json:"username"` -} - -type MqttAuthQuery struct { - Username string `query:"username"` - Password string `query:"password"` - IPAddr string `query:"ipaddr"` -} - -type MqttACLQuery 
struct { - Access string `query:"access"` - Username string `query:"username"` - Topic string `query:"topic"` - IPAddr string `query:"ipaddr"` -} diff --git a/pkg/models/namespace.go b/pkg/models/namespace.go index dcdd1c6e302..82095511ac9 100644 --- a/pkg/models/namespace.go +++ b/pkg/models/namespace.go @@ -1,21 +1,25 @@ package models -import ( - "time" -) +import "time" type Namespace struct { - Name string `json:"name" validate:"required,hostname_rfc1123,excludes=.,lowercase"` - Owner string `json:"owner"` - TenantID string `json:"tenant_id" bson:"tenant_id,omitempty"` - Members []Member `json:"members" bson:"members"` - Settings *NamespaceSettings `json:"settings"` - Devices int `json:"-" bson:"devices,omitempty"` - Sessions int `json:"-" bson:"sessions,omitempty"` - MaxDevices int `json:"max_devices" bson:"max_devices"` - DevicesCount int `json:"devices_count" bson:"devices_count,omitempty"` - CreatedAt time.Time `json:"created_at" bson:"created_at"` - Billing *Billing `json:"billing" bson:"billing,omitempty"` + Name string `json:"name" validate:"required,hostname_rfc1123,excludes=.,lowercase"` + Owner string `json:"owner"` + TenantID string `json:"tenant_id" bson:"tenant_id,omitempty"` + Members []Member `json:"members" bson:"members"` + Settings *NamespaceSettings `json:"settings"` + Devices int `json:"-" bson:"devices,omitempty"` + + DevicesAcceptedCount int64 `json:"devices_accepted_count" bson:"devices_accepted_count"` + DevicesPendingCount int64 `json:"devices_pending_count" bson:"devices_pending_count"` + DevicesRejectedCount int64 `json:"devices_rejected_count" bson:"devices_rejected_count"` + DevicesRemovedCount int64 `json:"devices_removed_count" bson:"devices_removed_count"` + + Sessions int `json:"-" bson:"sessions,omitempty"` + MaxDevices int `json:"max_devices" bson:"max_devices"` + CreatedAt time.Time `json:"created_at" bson:"created_at"` + Billing *Billing `json:"billing" bson:"billing,omitempty"` + Type Type `json:"type" bson:"type"` } // 
HasMaxDevices checks if the namespace has a maximum number of devices. @@ -27,8 +31,10 @@ func (n *Namespace) HasMaxDevices() bool { } // HasMaxDevicesReached checks if the namespace has reached the maximum number of devices. +// Only counts accepted devices. Removed devices no longer count towards the limit, +// allowing immediate slot reuse after deletion. func (n *Namespace) HasMaxDevicesReached() bool { - return uint64(n.DevicesCount) >= uint64(n.MaxDevices) + return n.DevicesAcceptedCount >= int64(n.MaxDevices) } // FindMember checks if a member with the specified ID exists in the namespace. @@ -43,11 +49,42 @@ func (n *Namespace) FindMember(id string) (*Member, bool) { } type NamespaceSettings struct { - SessionRecord bool `json:"session_record" bson:"session_record,omitempty"` + SessionRecord bool `json:"session_record" bson:"session_record,omitempty"` + ConnectionAnnouncement string `json:"connection_announcement" bson:"connection_announcement"` +} + +// default Announcement Message for the shellhub namespace +const DefaultAnnouncementMessage = ` +****************************************************************** +* * +* Welcome to ShellHub Community Edition! * +* * +* ShellHub is a next-generation SSH server, providing a * +* seamless, secure, and user-friendly solution for remote * +* access management. With ShellHub, you can manage all your * +* devices effortlessly from a single platform, ensuring optimal * +* security and productivity. * +* * +* Want to learn more about ShellHub and explore other editions? * +* Visit: https://shellhub.io * +* * +* Join our community and contribute to our open-source project: * +* https://github.com/shellhub-io/shellhub * +* * +* For assistance, please contact the system administrator. * +* * +****************************************************************** +` + +// NamespaceConflicts holds namespace attributes that must be unique for each document and can be utilized in queries +// to identify conflicts. 
+type NamespaceConflicts struct { + Name string } -type Member struct { - ID string `json:"id,omitempty" bson:"id,omitempty"` - Username string `json:"username,omitempty" bson:"username,omitempty" validate:"username"` - Role string `json:"role" bson:"role" validate:"required,oneof=administrator operator observer"` +// Distinct removes the c attributes whether it's equal to the namespace attribute. +func (c *NamespaceConflicts) Distinct(namespace *Namespace) { + if c.Name == namespace.Name { + c.Name = "" + } } diff --git a/pkg/models/publickey.go b/pkg/models/publickey.go index 20f6536959b..41fe1d16c73 100644 --- a/pkg/models/publickey.go +++ b/pkg/models/publickey.go @@ -11,8 +11,8 @@ import ( // // A PublicKeyFilter can contain either Hostname, string, or Tags, slice of strings never both. type PublicKeyFilter struct { - Hostname string `json:"hostname,omitempty" bson:"hostname,omitempty" validate:"required_without=Tags,excluded_with=Tags,regexp"` - Tags []string `json:"tags,omitempty" bson:"tags,omitempty" validate:"required_without=Hostname,excluded_with=Hostname,max=3,unique,dive,min=3,max=255,alphanum,ascii,excludes=/@&:"` + Hostname string `json:"hostname,omitempty" bson:"hostname,omitempty" validate:"required_without=Tags,excluded_with=Tags,regexp"` + Taggable `json:",inline" bson:",inline"` } type PublicKeyFields struct { diff --git a/pkg/models/session.go b/pkg/models/session.go index fa1e8bf8cd1..910bba29220 100644 --- a/pkg/models/session.go +++ b/pkg/models/session.go @@ -12,19 +12,20 @@ type SessionPosition struct { type Session struct { UID string `json:"uid"` DeviceUID UID `json:"device_uid,omitempty" bson:"device_uid"` - Device *Device `json:"device" bson:"device,omitempty"` + Device *Device `json:"device" bson:"-"` TenantID string `json:"tenant_id" bson:"tenant_id"` Username string `json:"username"` IPAddress string `json:"ip_address" bson:"ip_address"` StartedAt time.Time `json:"started_at" bson:"started_at"` LastSeen time.Time `json:"last_seen" 
bson:"last_seen"` - Active bool `json:"active" bson:",omitempty"` + Active bool `json:"active" bson:"active"` Closed bool `json:"-" bson:"closed"` Authenticated bool `json:"authenticated" bson:"authenticated"` Recorded bool `json:"recorded" bson:"recorded"` Type string `json:"type" bson:"type"` Term string `json:"term" bson:"term"` Position SessionPosition `json:"position" bson:"position"` + Events SessionEvents `json:"events" bson:"events"` } type ActiveSession struct { @@ -33,6 +34,10 @@ type ActiveSession struct { TenantID string `json:"tenant_id" bson:"tenant_id"` } +// NOTE: This struct has been moved to the cloud repo as it is only used in a cloud context; +// however, it is also utilized by migrations. For this reason, we must maintain the struct +// here ensure everything continues to function as expected. +// TODO: Remove this struct when it is no longer needed for migrations. type RecordedSession struct { UID UID `json:"uid"` Message string `json:"message" bson:"message"` @@ -46,10 +51,63 @@ type Status struct { Authenticated bool `json:"authenticated"` } -type SessionRecorded struct { - UID string `json:"uid"` - Namespace string `json:"namespace" bson:"namespace"` - Message string `json:"message" bson:"message"` - Width int `json:"width" bson:"width,omitempty"` - Height int `json:"height" bson:"height,omitempty"` +type SessionUpdate struct { + Recorded *bool `json:"recorded"` + Authenticated *bool `json:"authenticated"` + Type *string `json:"type"` +} + +type SessionEventType string + +const ( + // ShellHub custom requests. 
+ SessionEventTypePtyOutput SessionEventType = "pty-output" + + // Terminal (PTY) request types + SessionEventTypePtyRequest SessionEventType = "pty-req" + SessionEventTypeWindowChange SessionEventType = "window-change" + SessionEventTypeExitCode SessionEventType = "exit-code" + + // Process-related requests + SessionEventTypeExitStatus SessionEventType = "exit-status" + SessionEventTypeExitSignal SessionEventType = "exit-signal" + + // Environment and Shell requests + SessionEventTypeEnv SessionEventType = "env" + SessionEventTypeShell SessionEventType = "shell" + SessionEventTypeExec SessionEventType = "exec" + SessionEventTypeSubsystem SessionEventType = "subsystem" + + // Signal and forwarding requests + SessionEventTypeSignal SessionEventType = "signal" + SessionEventTypeTcpipForward SessionEventType = "tcpip-forward" + SessionEventTypeAuthAgentReq SessionEventType = "auth-agent-req" +) + +// SessionEvent represents a session event. +type SessionEvent struct { + // Session is the session UID where the event occurred. + Session string `json:"session" bson:"session,omitempty"` + // Type of the session. Normally, it is the SSH request name. + Type SessionEventType `json:"type" bson:"type"` + // Timestamp contains the time when the event was logged. + Timestamp time.Time `json:"timestamp" bson:"timestamp"` + // Data is a generic structure containing data of the event, normally the unmarshaling data of the request. + Data any `json:"data" bson:"data"` + // Seat is the seat where the event occurred. + Seat int `json:"seat" bson:"seat"` +} + +// SessionEvents stores the events registered in a session. +type SessionEvents struct { + // Types field is a set of sessions type to simplify the indexing on the database. + Types []string `json:"types" bson:"types,omitempty"` + // Seats contains a list of seats of events. + Seats []int `json:"seats" bson:"seats,omitempty"` +} + +// SessionSeat stores a session's seat. 
+type SessionSeat struct { + // ID is the identifier of session's seat. + ID int `json:"id" bson:"id,omitempty"` } diff --git a/pkg/models/ssh.go b/pkg/models/ssh.go new file mode 100644 index 00000000000..e99308d49fe --- /dev/null +++ b/pkg/models/ssh.go @@ -0,0 +1,41 @@ +package models + +type SSHCommand struct { + Command string `json:"command"` +} + +type SSHSubsystem struct { + Subsystem string `json:"subsystem"` +} + +type SSHExitStatus struct { + Status uint32 `json:"status"` +} + +type SSHSignal struct { + Name uint32 `json:"status"` + Dumped bool `json:"dumped"` + Message string `json:"message"` + Lang string `json:"lang"` +} + +type SSHWindowChange struct { + Columns uint32 `json:"columns"` + Rows uint32 `json:"rows"` + Width uint32 `json:"width"` + Height uint32 `json:"height"` +} + +// NOTE: [SSHPty] cannot use [SSHWindowChange] inside itself due [ssh.Unmarshal] issues. +type SSHPty struct { + Term string `json:"term"` + Columns uint32 `json:"columns" ` + Rows uint32 `json:"rows"` + Width uint32 `json:"width"` + Height uint32 `json:"height"` + Modelist []byte `json:"modelist"` +} + +type SSHPtyOutput struct { + Output string `json:"output"` +} diff --git a/pkg/models/system.go b/pkg/models/system.go index 2a560c5a9dc..e8c686a3c0c 100644 --- a/pkg/models/system.go +++ b/pkg/models/system.go @@ -1,11 +1,90 @@ package models -type SystemInfo struct { - Version string `json:"version"` - Endpoints *SystemInfoEndpoints `json:"endpoints"` +type System struct { + Setup bool `json:"setup"` + // Authentication manages the settings for available authentication methods, such as manual + // username/password authentication and SAML authentication. Each authentication method + // can be individually enabled or disabled. 
+ Authentication *SystemAuthentication `json:"authentication" bson:"authentication"` } -type SystemInfoEndpoints struct { - API string `json:"api"` - SSH string `json:"ssh"` +type SystemAuthentication struct { + Local *SystemAuthenticationLocal `json:"local" bson:"local"` + SAML *SystemAuthenticationSAML `json:"saml" bson:"saml"` +} + +type SystemAuthenticationLocal struct { + // Enabled indicates whether manual authentication using a username and password is enabled or + // not. + Enabled bool `json:"enabled" bool:"enabled"` +} + +type SystemAuthenticationSAML struct { + // Enabled indicates whether SAML authentication is enabled. + Enabled bool `json:"enabled" bson:"enabled"` + Idp *SystemIdpSAML `json:"idp" bson:"idp"` + Sp *SystemSpSAML `json:"sp" bson:"sp"` +} + +type SystemAuthenticationBinding struct { + Post string `json:"post" bson:"post"` + Redirect string `json:"redirect" bson:"redirect"` + // PreferredBinding defines the preferred SAML binding method. + Preferred string `json:"preferred" bson:"preferred"` +} + +type SystemIdpSAML struct { + EntityID string `json:"entity_id" bson:"entity_id"` + Binding *SystemAuthenticationBinding `json:"binding" bson:"binding"` + // Certificates is a list of X.509 certificates provided by the IdP. These certificates are used + // by the SP to validate the digital signatures of SAML assertions issued by the IdP. + Certificates []string `json:"certificates" bson:"certificates"` + // Mappings defines how IdP SAML attributes map to ShellHub attributes. + // + // Example: + // { + // "external_id": "user_id", + // "email": "emailaddress", + // "name": "displayName" + // } + Mappings map[string]string `json:"mappings" bson:"mappings"` +} + +type SystemSpSAML struct { + // SignRequests indicates whether ShellHub should sign authentication requests. + // If enabled, an X509 certificate is used to sign the request, and the IdP must authenticate + // the request using the corresponding public certificate. 
Enabling this option disables + // the "IdP-initiated" authentication pipeline. + SignAuthRequests bool `json:"sign_auth_requests" bson:"sign_auth_requests"` + // Certificate is an X509 certificate that the IdP uses to verify the authenticity of the + // authentication request signed by ShellHub. This certificate corresponds to the private key + // in the [SystemSpSAML.PrivateKey] and it is only populated when [SystemSpSAML.SignAuthRequests] + // is true. + Certificate string `json:"certificate" bson:"certificate"` + // PrivateKey is an encrypted private key used by ShellHub to sign authentication requests. + // The IdP verifies the signature using the [SystemSpSAML.Certificate]. It is only populated + // when [SystemSpSAML.SignAuthRequests] is true. + PrivateKey string `json:"-" bson:"private_key"` +} + +type SAMLBinding struct { + URL string + Binding string +} + +const ( + SAMLBindingPost = "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST" + SAMLBindingRedirect = "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect" +) + +func (s *SystemIdpSAML) GetBinding() SAMLBinding { + if s.Binding.Preferred == "post" { + return SAMLBinding{URL: s.Binding.Post, Binding: SAMLBindingPost} + } else if s.Binding.Preferred == "redirect" { + return SAMLBinding{URL: s.Binding.Redirect, Binding: SAMLBindingRedirect} + } else if s.Binding.Post != "" { + return SAMLBinding{URL: s.Binding.Post, Binding: SAMLBindingPost} + } else { + return SAMLBinding{URL: s.Binding.Redirect, Binding: SAMLBindingRedirect} + } } diff --git a/pkg/models/tags.go b/pkg/models/tags.go new file mode 100644 index 00000000000..2da71a6f8b5 --- /dev/null +++ b/pkg/models/tags.go @@ -0,0 +1,40 @@ +package models + +import "time" + +// Taggable is an embeddable struct that adds tagging capability to other models. 
+// +// Example usage: +// +// type Device struct { +// Taggable // Embed the Taggable struct +// Name string // Other device fields +// } +type Taggable struct { + // TagIDs contains the IDs of associated tags. It is used only for database storage + // and relationship management. The field is not exposed in JSON responses to keep + // the API focused on meaningful tag data rather than internal identifiers. + TagIDs []string `json:"-" bson:"tag_ids"` + + // Tags contains the complete Tag objects associated with this resource. This field + // is populated from TagIDs when retrieving data from the database, but is not + // stored directly. It is used only for JSON serialization to provide clients + // with full tag information. + Tags []Tag `json:"tags" bson:"tags,omitempty"` +} + +type Tag struct { + ID string `json:"-" bson:"_id"` + TenantID string `json:"tenant_id" bson:"tenant_id"` + Name string `json:"name" bson:"name"` + CreatedAt time.Time `json:"created_at" bson:"created_at"` + UpdatedAt time.Time `json:"updated_at" bson:"updated_at"` +} + +type TagChanges struct { + Name string `bson:"name,omitempty"` +} + +type TagConflicts struct { + Name string +} diff --git a/pkg/models/type.go b/pkg/models/type.go new file mode 100644 index 00000000000..58f673efef7 --- /dev/null +++ b/pkg/models/type.go @@ -0,0 +1,20 @@ +package models + +type Type string + +const ( + TypePersonal Type = "personal" + TypeTeam Type = "team" +) + +func NewDefaultType() Type { + return TypeTeam +} + +func IsTypeTeam(typeNamespace string) bool { + return Type(typeNamespace) == TypeTeam +} + +func IsTypePersonal(typeNamespace string) bool { + return Type(typeNamespace) == TypePersonal +} diff --git a/pkg/models/user.go b/pkg/models/user.go index c331338aec8..f68aa93f335 100644 --- a/pkg/models/user.go +++ b/pkg/models/user.go @@ -1,75 +1,140 @@ package models import ( - "crypto/sha256" - "encoding/hex" "time" - jwt "github.com/golang-jwt/jwt/v4" + "github.com/shellhub-io/shellhub/pkg/hash" 
"github.com/shellhub-io/shellhub/pkg/validator" ) -type UserData struct { - Name string `json:"name" validate:"required,name"` - Email string `json:"email" bson:",omitempty" validate:"required,email"` - Username string `json:"username" bson:",omitempty" validate:"required,username"` -} +type UserStatus string -type UserPassword struct { - // PlainPassword contains the plain text password. - PlainPassword string `json:"password" bson:"-" validate:"required,password"` - // HashedPassword contains the hashed pasword from plain text. - HashedPassword string `json:"-" bson:"password"` -} - -// NewUserPassword creates a new [UserPassword] and hashes it. -func NewUserPassword(password string) UserPassword { - model := UserPassword{ - PlainPassword: password, - } +const ( + // UserStatusNotConfirmed applies to cloud-only instances. This status is assigned to a user who has registered + // but has not yet confirmed their email address. + UserStatusNotConfirmed UserStatus = "not-confirmed" - model.Hash() + // UserStatusConfirmed indicates that the user has completed the registration process and confirmed their email address. + // Users in community and enterprise instances will always be created with this status. + UserStatusConfirmed UserStatus = "confirmed" +) - return model +func (s UserStatus) String() string { + return string(s) } -func (p *UserPassword) hash(string) string { - sum := sha256.Sum256([]byte(p.PlainPassword)) +type UserOrigin string - return hex.EncodeToString(sum[:]) -} +const ( + // UserOriginLocal indicates that the user was created through the standard signup process, without + // using third-party integrations like SSO IdPs. + UserOriginLocal UserOrigin = "local" -// Hash hashes the plain password. -func (p *UserPassword) Hash() string { - p.HashedPassword = p.hash(p.PlainPassword) + // UserOriginSAML indicates that the user was created using a SAML method. 
+ UserOriginSAML UserOrigin = "SAML" +) - return p.HashedPassword +func (o UserOrigin) String() string { + return string(o) } -// Compare the hashed password with the parameter. -// -// The compared password must be hashed. -func (p *UserPassword) Compare(password UserPassword) bool { - return password.HashedPassword == p.HashedPassword -} +type UserAuthMethod string + +const ( + // UserAuthMethodLocal indicates that the user can authenticate using an email and password. + UserAuthMethodLocal UserAuthMethod = "local" + + // UserAuthMethodManual indicates that the user can authenticate using a third-party SAML application. + UserAuthMethodSAML UserAuthMethod = "saml" +) -func (p *UserPassword) String() string { - return p.HashedPassword +func (a UserAuthMethod) String() string { + return string(a) } type User struct { - ID string `json:"id,omitempty" bson:"_id,omitempty"` - Namespaces int `json:"namespaces" bson:"namespaces,omitempty"` + ID string `json:"id,omitempty" bson:"_id,omitempty"` + // Origin specifies the the user's signup method. + Origin UserOrigin `json:"-" bson:"origin"` + + // ExternalID represents the user's identifier in an external system. It is always empty when [User.Origin] + // is [UserOriginLocal]. + ExternalID string `json:"-" bson:"external_id"` + + Status UserStatus `json:"status" bson:"status"` + // MaxNamespaces represents the count of namespaces that the user can owns. MaxNamespaces int `json:"max_namespaces" bson:"max_namespaces"` - Confirmed bool `json:"confirmed"` CreatedAt time.Time `json:"created_at" bson:"created_at"` LastLogin time.Time `json:"last_login" bson:"last_login"` EmailMarketing bool `json:"email_marketing" bson:"email_marketing"` - MFA bool `json:"status_mfa" bson:"status_mfa"` - Secret string `json:"secret" bson:"secret"` - Codes []string `json:"codes" bson:"codes"` UserData `bson:",inline"` - UserPassword `bson:",inline"` + // MFA contains attributes related to a user's MFA settings. 
Use [UserMFA.Enabled] to + // check if MFA is active for the user. + // + // NOTE: MFA is available as a cloud-only feature and must be ignored in community. + MFA UserMFA `json:"mfa" bson:"mfa"` + Preferences UserPreferences `json:"preferences" bson:"preferences"` + Password UserPassword `bson:",inline"` + // Admin indicates whether the user has administrative privileges. + Admin bool `json:"admin" bson:"admin"` +} + +type UserData struct { + Name string `json:"name" validate:"required,name"` + Username string `json:"username" bson:"username" validate:"required,username"` + Email string `json:"email" bson:"email" validate:"required,email"` + // RecoveryEmail is a custom, non-unique email address that a user can use to recover their account + // when they lose access to all other methods. It must never be equal to [UserData.Email]. + // + // NOTE: Recovery email is available as a cloud-only feature and must be ignored in community. + RecoveryEmail string `json:"recovery_email" bson:"recovery_email" validate:"omitempty,email"` +} + +// UserMFA represents the attributes related to MFA for a user. +type UserMFA struct { + // Enabled reports whether MFA is enabled for the user. + Enabled bool `json:"enabled" bson:"enabled"` + // Secret is the key used for authenticating with the OTP server. + Secret string `json:"-" bson:"secret"` + // RecoveryCodes are recovery tokens that the user can use to regain account access if they lose their MFA device. + RecoveryCodes []string `json:"-" bson:"recovery_codes"` +} + +type UserPreferences struct { + // PreferredNamespace represents the namespace the user most recently authenticated with. + PreferredNamespace string `json:"-" bson:"preferred_namespace"` + + // AuthMethods indicates the authentication methods that the user can use to authenticate. + AuthMethods []UserAuthMethod `json:"auth_methods" bson:"auth_methods"` +} + +type UserPassword struct { + // Plain contains the plain text password. 
+ Plain string `json:"password" bson:"-" validate:"required,password"` + // Hash contains the hashed pasword from plain text. + Hash string `json:"-" bson:"password"` +} + +// HashUserPassword receives a plain password and hash it, returning +// a [UserPassword]. +func HashUserPassword(plain string) (UserPassword, error) { + p := UserPassword{ + Plain: plain, + } + + var err error + p.Hash, err = hash.Do(p.Plain) + + return p, err +} + +// Compare reports whether a plain password matches with hash. +// +// For compatibility purposes, it can compare using both SHA256 and bcrypt algorithms. +// Hashes starting with "$" are assumed to be a bcrypt hash; otherwise, they are treated as +// SHA256 hashes. +func (p *UserPassword) Compare(plain string) bool { + return hash.CompareWith(plain, p.Hash) } // UserAuthIdentifier is an username or email used to authenticate. @@ -84,42 +149,70 @@ func (i *UserAuthIdentifier) IsEmail() bool { return true } -type UserAuthRequest struct { - // Identifier represents an username or email. - // - // TODO: change json tag from username to identifier and update the OpenAPI. 
- Identifier UserAuthIdentifier `json:"username"` - Password string `json:"password"` +type UserAuthResponse struct { + Token string `json:"token"` + User string `json:"user"` + Origin string `json:"origin"` + AuthMethods []UserAuthMethod `json:"auth_methods"` + Name string `json:"name"` + ID string `json:"id"` + Tenant string `json:"tenant"` + Email string `json:"email"` + RecoveryEmail string `json:"recovery_email"` + Role string `json:"role"` + MFA bool `json:"mfa"` + MaxNamespaces int `json:"max_namespaces"` + Admin bool `json:"admin"` } -type UserAuthResponse struct { - Token string `json:"token"` - User string `json:"user"` - Name string `json:"name"` - ID string `json:"id"` - Tenant string `json:"tenant"` - Role string `json:"role"` - Email string `json:"email"` - MFA MFA `json:"mfa" bson:"mfa"` +// NOTE: This struct has been moved to the cloud repo as it is only used in a cloud context; +// however, it is also utilized by migrations. For this reason, we must maintain the struct +// here ensure everything continues to function as expected. +// TODO: Remove this struct when it is no longer needed for migrations. +type UserTokenRecover struct { + Token string `json:"uid"` + User string `json:"user_id"` + CreatedAt time.Time `json:"created_at" bson:"created_at"` } -type UserAuthClaims struct { - Username string `json:"name"` - Admin bool `json:"admin"` - Tenant string `json:"tenant"` - ID string `json:"id"` - Role string `json:"role"` - AuthClaims `mapstruct:",squash"` - MFA MFA `json:"mfa"` - jwt.RegisteredClaims `mapstruct:",squash"` +// UserChanges specifies the attributes that can be updated for a user. Any zero values in this +// struct must be ignored. If an attribute is a pointer type, its zero value is represented as `nil`. 
+type UserChanges struct { + LastLogin time.Time `bson:"last_login,omitempty"` + Name string `bson:"name,omitempty"` + Username string `bson:"username,omitempty"` + Email string `bson:"email,omitempty"` + RecoveryEmail string `bson:"recovery_email,omitempty"` + Password string `bson:"password,omitempty"` + Status UserStatus `bson:"status,omitempty"` + ExternalID *string `bson:"external_id,omitempty"` + PreferredNamespace *string `bson:"preferences.preferred_namespace,omitempty"` + MaxNamespaces *int `bson:"max_namespaces,omitempty"` + EmailMarketing *bool `bson:"email_marketing,omitempty"` + AuthMethods []UserAuthMethod `bson:"preferences.auth_methods,omitempty"` } -func (u *UserAuthClaims) SetRegisteredClaims(claims jwt.RegisteredClaims) { - u.RegisteredClaims = claims +// UserConflicts holds user attributes that must be unique for each itam and can be utilized in queries +// to identify conflicts. +type UserConflicts struct { + Email string + Username string } -type UserTokenRecover struct { - Token string `json:"uid"` - User string `json:"user_id"` - CreatedAt time.Time `json:"created_at" bson:"created_at"` +// Distinct removes the c attributes whether it's equal to the user attribute. +func (c *UserConflicts) Distinct(user *User) { + if c.Email == user.Email { + c.Email = "" + } + + if c.Username == user.Username { + c.Username = "" + } +} + +type UserInfo struct { + // OwnedNamespaces are the namespaces where the user is the owner. + OwnedNamespaces []Namespace + // AssociatedNamespaces are the namespaces where the user is a member. 
+ AssociatedNamespaces []Namespace } diff --git a/pkg/revdial/revdial.go b/pkg/revdial/revdial.go index 39fe0911194..8486b81c926 100644 --- a/pkg/revdial/revdial.go +++ b/pkg/revdial/revdial.go @@ -24,7 +24,6 @@ import ( "encoding/json" "errors" "fmt" - "log" "net" "net/http" "strings" @@ -33,12 +32,15 @@ import ( "github.com/gorilla/websocket" "github.com/shellhub-io/shellhub/pkg/clock" + "github.com/shellhub-io/shellhub/pkg/uuid" "github.com/shellhub-io/shellhub/pkg/wsconnadapter" - "github.com/sirupsen/logrus" + log "github.com/sirupsen/logrus" ) -var ErrDialerClosed = errors.New("revdial.Dialer closed") -var ErrDialerTimedout = errors.New("revdial.Dialer timedout") +var ( + ErrDialerClosed = errors.New("revdial.Dialer closed") + ErrDialerTimedout = errors.New("revdial.Dialer timedout") +) // dialerUniqParam is the parameter name of the GET URL form value // containing the Dialer's random unique ID. @@ -54,16 +56,15 @@ type Dialer struct { uniqID string pickupPath string // path + uniqID: "/revdial?revdial.dialer="+uniqID - incomingConn chan net.Conn + incomingConn *sync.Map pickupFailed chan error - connReady chan bool + connReady chan string donec chan struct{} closeOnce sync.Once + logger *log.Entry } -var ( - dialers = sync.Map{} -) +var dialers = sync.Map{} // NewDialer returns the side of the connection which will initiate // new connections. This will typically be the side which did the HTTP @@ -71,15 +72,16 @@ var ( // connection. The connPath is the HTTP path and optional query (but // without scheme or host) on the dialer where the ConnHandler is // mounted. 
-func NewDialer(c net.Conn, connPath string) *Dialer { +func NewDialer(logger *log.Entry, c net.Conn, connPath string) *Dialer { d := &Dialer{ path: connPath, uniqID: newUniqID(), conn: c, donec: make(chan struct{}), - connReady: make(chan bool), - incomingConn: make(chan net.Conn), + connReady: make(chan string, 8), + incomingConn: new(sync.Map), pickupFailed: make(chan error), + logger: logger, } join := "?" @@ -90,6 +92,8 @@ func NewDialer(c net.Conn, connPath string) *Dialer { d.register() go d.serve() // nolint:errcheck + d.logger.Debug("new dialer connection") + return d } @@ -121,6 +125,8 @@ func (d *Dialer) Close() error { } func (d *Dialer) close() { + d.logger.Debug("dialer connection closed") + d.unregister() d.conn.Close() d.donec <- struct{}{} @@ -129,31 +135,67 @@ func (d *Dialer) close() { // Dial creates a new connection back to the Listener. func (d *Dialer) Dial(ctx context.Context) (net.Conn, error) { + uuid := uuid.Generate() + // First, tell serve that we want a connection: select { - case d.connReady <- true: + case d.connReady <- uuid: + d.logger.Debug("message true to conn ready channel") + + d.incomingConn.Store(uuid, make(chan net.Conn)) + defer d.incomingConn.Delete(uuid) case <-d.donec: + d.logger.Debug("dial done") + return nil, ErrDialerClosed case <-ctx.Done(): + d.logger.Debug("dial done due context cancellation") + return nil, ctx.Err() } + ch, ok := d.incomingConn.Load(uuid) + if !ok { + return nil, errors.New("failed to load the incoming connection map") + } + + connection := ch.(chan net.Conn) + // Then pick it up: select { - case c := <-d.incomingConn: + case c := <-connection: + d.logger.Debug("new incoming connection") + return c, nil case err := <-d.pickupFailed: + d.logger.Debug("failed to pick-up connection") + return nil, err case <-d.donec: + d.logger.Debug("dial done on pick-up") + return nil, ErrDialerClosed case <-ctx.Done(): + d.logger.Debug("dial done on pick-up due context cancellation") + return nil, ctx.Err() } } 
func (d *Dialer) matchConn(c net.Conn) { + uuid := c.(*wsconnadapter.Adapter).UUID + + ch, ok := d.incomingConn.Load(uuid) + if !ok { + d.logger.Debug("failed to find the incoming connection channel") + + return + } + + connection := ch.(chan net.Conn) + select { - case d.incomingConn <- c: + case connection <- c: case <-d.donec: } } @@ -165,21 +207,24 @@ func (d *Dialer) serve() error { go func() { defer d.Close() + defer d.logger.Debug("dialer serve done") br := bufio.NewReader(d.conn) for { line, err := br.ReadSlice('\n') if err != nil { + d.logger.WithError(err).Trace("failed to read the agent's command") + unexpectedError := websocket.IsUnexpectedCloseError(err, websocket.CloseGoingAway, websocket.CloseAbnormalClosure) if !errors.Is(err, net.ErrClosed) && unexpectedError { - logrus.WithError(err).Error("revdial.Dialer failed to read") + d.logger.WithError(err).Error("revdial.Dialer failed to read") } return } var msg controlMsg if err := json.Unmarshal(line, &msg); err != nil { - log.Printf("revdial.Dialer read invalid JSON: %q: %v", line, err) + d.logger.WithError(err).WithField("line", line).Printf("revdial.Dialer read invalid JSON") return } @@ -190,16 +235,21 @@ func (d *Dialer) serve() error { select { case d.pickupFailed <- err: case <-d.donec: + d.logger.WithError(err).Debug("failed to pick-up connection") + return } case "keep-alive": default: // Ignore unknown messages + log.WithField("message", msg.Command).Debug("unknown message received") } } }() for { if err := d.sendMessage(controlMsg{Command: "keep-alive"}); err != nil { + d.logger.WithError(err).Debug("failed to send keep-alive message to device") + return err } @@ -207,12 +257,14 @@ func (d *Dialer) serve() error { select { case <-t.C: continue - case <-d.connReady: + case uuid := <-d.connReady: t.Stop() if err := d.sendMessage(controlMsg{ Command: "conn-ready", - ConnPath: d.pickupPath, + ConnPath: d.pickupPath + fmt.Sprintf("&uuid=%s", uuid), }); err != nil { + 
d.logger.WithError(err).Debug("failed to send conn-ready message to device") + return err } case <-d.donec: @@ -225,6 +277,8 @@ func (d *Dialer) serve() error { func (d *Dialer) sendMessage(m controlMsg) error { if err := d.conn.SetWriteDeadline(clock.Now().Add(10 * time.Second)); err != nil { + d.logger.WithError(err).Debug("failed to set the write dead line to device") + return err } @@ -232,6 +286,8 @@ func (d *Dialer) sendMessage(m controlMsg) error { j = append(j, '\n') if _, err := d.conn.Write(j); err != nil { + d.logger.WithError(err).Debug("failed to write on the connection") + return err } @@ -449,6 +505,7 @@ func (fakeAddr) String() string { return "revdialconn" } func ConnHandler(upgrader websocket.Upgrader) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { dialerUniq := r.FormValue(dialerUniqParam) + uuid := r.FormValue("uuid") d, ok := dialers.Load(dialerUniq) if !ok { @@ -464,6 +521,9 @@ func ConnHandler(upgrader websocket.Upgrader) http.Handler { return } - d.(*Dialer).matchConn(wsconnadapter.New(wsConn)) + c := wsconnadapter.New(wsConn) + c.UUID = uuid + + d.(*Dialer).matchConn(c) }) } diff --git a/pkg/validator/validator.go b/pkg/validator/validator.go index 6e6424fb30b..a9556214ce8 100644 --- a/pkg/validator/validator.go +++ b/pkg/validator/validator.go @@ -1,12 +1,16 @@ package validator import ( + "crypto/x509" + "encoding/pem" "errors" "fmt" "reflect" "regexp" + "unicode" "github.com/go-playground/validator/v10" + "github.com/shellhub-io/shellhub/pkg/api/authorizer" ) var ( @@ -35,6 +39,9 @@ const ( UserPasswordTag = "password" // DeviceNameTag contains the rule to validate the device's name. DeviceNameTag = "device_name" + // PrivateKeyPEMTag contains the rule to validate a private key. + PrivateKeyPEMTag = "privateKeyPEM" + CertPEMTag = "certPEM" ) // Rules is a slice that contains all validation rules. 
@@ -72,10 +79,81 @@ var Rules = []Rule{ { Tag: DeviceNameTag, Handler: func(field validator.FieldLevel) bool { - return regexp.MustCompile(`^([a-z0-9_-]){1,64}$`).MatchString(field.Field().String()) + return regexp.MustCompile(`^([a-zA-Z0-9_-]){1,64}$`).MatchString(field.Field().String()) }, Error: fmt.Errorf("the device name can only contain `_`, `-` and alpha numeric characters"), }, + // api-key_name reports whether a given string is a valid name for an api key or not. A valid + // value must be more than 3 characters, less than 20 and does not contains any whitespace. + { + Tag: "api-key_name", + Handler: func(field validator.FieldLevel) bool { + name := field.Field().String() + + if len(name) < 3 || len(name) > 20 { + return false + } + + for _, c := range field.Field().String() { + if unicode.IsSpace(c) { + return false + } + } + + return true + }, + Error: fmt.Errorf("name must contain at least 3 characters, at most 20 characters, and no whitespaces"), + }, + // api-key_expires-at reports whether a given int is in [ 30 60 90 365 -1 ]. 
+ { + Tag: "api-key_expires-at", + Handler: func(field validator.FieldLevel) bool { + if !field.Field().CanInt() { + return false + } + + expiresAt := field.Field().Int() + + return expiresAt == -1 || expiresAt == 30 || expiresAt == 60 || expiresAt == 90 || expiresAt == 365 + }, + Error: fmt.Errorf("expires_at must be in [ -1 30 60 90 365 ]"), + }, + // member_role reports whether a given string is a valid role or not + { + Tag: "member_role", + Handler: func(field validator.FieldLevel) bool { + return authorizer.RoleFromString(field.Field().String()) != authorizer.RoleInvalid + }, + Error: fmt.Errorf("role must be \"owner\", \"administrator\", \"operator\" or \"observer\""), + }, + { + Tag: PrivateKeyPEMTag, + Handler: func(field validator.FieldLevel) bool { + block, _ := pem.Decode([]byte(field.Field().String())) + if block == nil { + return false + } + + key, err := x509.ParsePKCS8PrivateKey(block.Bytes) + + return err == nil && key != nil + }, + Error: fmt.Errorf("the private key is invalid"), + }, + { + Tag: CertPEMTag, + Handler: func(field validator.FieldLevel) bool { + block, _ := pem.Decode([]byte(field.Field().String())) + if block == nil { + return false + } + + cert, err := x509.ParseCertificate(block.Bytes) + + return err == nil && cert != nil + }, + Error: fmt.Errorf("the cert is invalid"), + }, } // Validator is the ShellHub validator. @@ -117,6 +195,23 @@ func (v *Validator) Struct(structure any) (bool, error) { return true, nil } +// StructWithFields validades a structure using ShellHub validation's tags, returnig the invalid fields and its tags. 
+func (v *Validator) StructWithFields(structure any) (bool, map[string]interface{}, error) { + if err := v.Validate.Struct(structure); err != nil { + fields := make(map[string]interface{}, 0) + + errs := err.(validator.ValidationErrors) + + for _, e := range errs { + fields[e.Field()] = e.Tag() + } + + return false, fields, ErrStructureInvalid + } + + return true, nil, nil +} + // GetTagFromStructure returns the validation's tag from structure. func GetTagFromStructure(structure any, field string) (Tag, bool) { kind := reflect.TypeOf(structure) diff --git a/pkg/validator/validator_test.go b/pkg/validator/validator_test.go index ad29962c2db..00e6cd3ff2e 100644 --- a/pkg/validator/validator_test.go +++ b/pkg/validator/validator_test.go @@ -224,11 +224,6 @@ func TestDeviceName(t *testing.T) { value: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaax", want: false, }, - { - description: "failed when the device name is uppercase", - value: "TEST", - want: false, - }, { description: "failed when the device name contains invalid characters", value: "test$", @@ -265,3 +260,199 @@ func TestDeviceName(t *testing.T) { }) } } + +func TestKeyPEM(t *testing.T) { + tests := []struct { + description string + value string + want bool + }{ + { + description: "failed when the private key is empty", + value: "", + want: false, + }, + { + description: "failed when the private key does not have the header", + value: ` +MC4CAQAwBQYDK2VwBCIEIA2Ecxi0E2XsKUNRYBv98VRbpsjl/kD7l7XOa/aKYitU +-----END PRIVATE KEY-----`, + want: false, + }, + { + description: "failed when the private key does not have the footer", + value: `-----BEGIN PRIVATE KEY----- +MC4CAQAwBQYDK2VwBCIEIA2Ecxi0E2XsKUNRYBv98VRbpsjl/kD7l7XOa/aKYitU +`, + want: false, + }, + { + description: "failed when the private key does not have header neither footer", + value: ` +MC4CAQAwBQYDK2VwBCIEIA2Ecxi0E2XsKUNRYBv98VRbpsjl/kD7l7XOa/aKYitU +`, + want: false, + }, + { + description: "success when the private key is a 
valid ED25519", + value: `-----BEGIN PRIVATE KEY----- +MC4CAQAwBQYDK2VwBCIEIA2Ecxi0E2XsKUNRYBv98VRbpsjl/kD7l7XOa/aKYitU +-----END PRIVATE KEY-----`, + want: true, + }, + { + description: "success when the private key is a valid RSA4096", + value: `-----BEGIN PRIVATE KEY----- +MIIJQgIBADANBgkqhkiG9w0BAQEFAASCCSwwggkoAgEAAoICAQDt92hrf1PDvCAw +NaEv1xjfL2QCyEsA7zxBGPIIA5ETsB41LW3yS98oy8F/L72BDEepmsw49DaQLbIZ +JrjXyT4dtYKN9oPgv5uwwmwPrWexsiDiVA968DOgSWj4S4MIDLAwd3gBqrQgqFut +Iwgt58KzhKYn/J9+1q/G8ecKzRre7c7/PQbCHEH4A/XiIudyuSf49ziU+U7dq9rZ +IAiyG2xMAKZnjANP0dQj8gaAJCD1qesyoIUXrHCuesrZEEY1gov6ZxUeR62KQgIF +JDQ8nrGgPRc/AjNcLhLKH5xaRqfbEv3WyYw1Ag4Fc1ZtIOgLbMr9BRcxnrhCAIBD +4ASU+63N5zxC/K0JOPy4iSa8+uMXoYD4eJIUI4e9cuAp976zCsrd6d2QEDZmly2/ +KGrcTunlNQ49LfqV9LQWnumRoQ5vhlOHWAQmY48svf45PGeQrrbLUfV24uO4Zzwn +CCCHBUUUwTlasZi1zwHgZ1rmqOjemnGn6HJ9T64tFypUQKOiS5NxeAajszQLf3Gf +IE8ZibE+uxZQyvRexmyUt+RaOQfyAKtnczyOd9LU4/JqVtbKYtuxltw503gS+Ruz +xcHuFEv/takSszbr9mKAj/pT0MEKE9nJLP2gcqw0j2fdjfWWejPGwWlxJ98sPlw8 +eh4KNOtphFmgbjIUTrjfS6G+3cbOwQIDAQABAoICAHDw8hnHCjoFcR+AbJqYk6Dl +zKk3Z8WvReE9li2wh6wY9BVYFO0hDm692f3j6iSz79Uy94d2fOkMDxG525Pq2vTd +v3NiUzAZsKqBdCkyq1reiJXywJAgLdh+zve9Wxi4cOzn3sinvKsdTLmNPWYQL8vl +ArlKwGZCPZYGHJp3QzAYHRzt2WXKZJLySkKEP2YnM64Jo8ys0L4LwSg4+HeT5V/j +FRdjD/VTyMQwq94oh44hEdRq9BAK00Y0WE8SVsgxx/7V6uN+sIJEltHa34H/7Zz4 +Ma7BfB/dbCSLQTllfGhRCLHm4YkNCxuSJKxRqGA3x9Wzk1EFHD2TIE1WpsYQ92ku +ZrYt9XsVQVEvoJpo9qfpJwtYkbSJIcOVzRSuPX5xb3q+rPT1aGfJPtZUtfwokL0O +iRK60eGntenSlJNPrbgTjr2JULd4rlZy4CGYy6frVBCYjDr/f+Li25Ya17VCezZV +1R9TbTORaKlbTc0gonaXuVX5G23DdrpMFvlBspL8fx4c9Ewy+8D9EdO5w2j+pFaI +rj7JL4hTIWKv8YG3jACuXvGKy9ikQXq1h6hDtpeqJ1y7CGq1JIEWh4IGHZTHA+WD +kRPe0YtZ5092OZcT43h8Gr/Qg4nS0qwmUc5eEs33F3PKumzNeZ2cfHiTLWd5PRMW +WBWu+o/bN79VANWQiCtDAoIBAQD73UbT++YxfM51I62XGjjdc8CSZd0a2zk3nvpo +8JeBrWnfefmRuA2QaLyC+u9py5RTHMeq1EjncMBE48LSaRUisjfORtJ+D6ZUovGm +++BJKBt/VuBu3Opnrz/opscWJhVzwPoMa/oKvkhA02dS+y+sQ7feUJm3nkVQ3peq +U/WDtEFWgqHa89SPssNYdH7t4M9OX/L0q1hN6LN1umvPUm4P2vT/d58EaxxuQ4Z6 
+qtfFSr6IRBChPUOoVCZPmB81I9qDyU8sbnZsl8evxZ/cwMrJn1GdEcAm/9r/+K05 +HCw1Whs6ZVepqYf5yX84V7FNoar16txMQGJaWHfFgouumMSPAoIBAQDx37XtoC+n +FRCRVjcAc86GXaH5g3fFU6seg1Mkoe4H3vA7EMosZKJ37V8G7lTyR5C4BFIcRyX9 +bWXpP2Aubyqq4aq6wunratU8VgdmboKh1ADQ/tQd9HCNpJtAmI8hfan3Vxv4lKED +WgcraaWHa7VOrjfJsaMC9SV9vDBVNfY+dzz4OZEafjKGySkTMoBrWfEfO+Q0sVDR +acmE/g3cTEjlvDarWG5yquSBEidO/4eZRhyx76wERAi77eOUGak83rOoaRdfrWim +Zi6C8H/5hvhrBSn+TbUK05rF9vVvrs1kRB4qgnFm4aFFbKLyjuHEtkulM1BvMR+a +15l/ES7ikv+vAoIBAQDIYOd0x7gALzdiYpw81xPeu7S9xGUAdOE0qzq2OpOPDBRr +Q3OWx0OjXHB+FH5dQSYkaYVBF9tYpo+RP1NEa23xSLC1YAsfV/wQ4gI3w7RQ/6PA +z7GHAiNLklXaFrXVnT779M/7CfzIh1KcoJRXpJftCYNDUAS73SNwj2dCj8GIouRI +m22B8PNvz90yhpxlTLIhvJxio9+BPF1qkIItU3tVCfJZPSY6Ma1Q3FAlT76SrECh +0OUaIs+tICXKtVA+yuOSbZqb0tZM1wR7h1MEIi4z8pjPycuCO5RUidfm088oMyPu +daokxUf1JqYcgUgCZ1jIha32zFJzZmcDsDTJF6lpAoIBAAwBc7FQ0yyy8fiU0/QU +y3qF6UVOTkKgLY09LYJS+1KusTPtWGutrxbO1HmumM7R2JAZvs2ihnM22+kg+TA0 +2mRTATt181B5JA5zorhl4dwQft3g2DyIZpHRSteA+xHJgAdD7qJ/FiLpdBOmkc3P +/dbi9OfxBkteSbcdATUpkYh2OLOFf/tVqkJgd8Z5KkCp3TsUqPYomv9aBeOxDJUT +wEaO+hO1Nv5AF0mE0iisrFliTohSgjJQAjL50uMGBw17bGV+medo3xnrVoGvWFrV +ZT1Cq1vxFXxtFnCfGn2pqo5Ah1LK2MAnkO62PrxVdUVjWwvfKS3rvUrdSsQw4Sfj +7gcCggEAJk/ydgLGXs1Ti5g5yxe8HkrOM/zycUymeSt3j0EpfXYQEPKmS/337kpT +VvMc7QlFZnjdidRrlCxqnLJZ8kcbLDMRikU+IWikpWUBvlk3mSp3Z98otz1OBBJV +C08w1DePdRSEJgiMdqfjtIg6Dg9R0CpaQ/YLolkkhJ5LekaBvQJqNQT7wgG9NHvG +5p5q2wJfrbxoZX2gGRuqMhNfx9pJJbZdP08DWfeja8MG+JkZqMiKEDPlZTWHSLf3 +uccmoL1Os2G6iqnhL+rIFf637U2B/DinlaODYsM1b96MrrpLgBHU/4OcwsN0t751 +rRrVfCKhbJKpjAZq5U9VKt9LcGe9kA== +-----END PRIVATE KEY-----`, + want: true, + }, + } + + for _, tt := range tests { + t.Run(tt.description, func(t *testing.T) { + data := struct { + Key string `validate:"required,privateKeyPEM"` + }{ + Key: tt.value, + } + + ok, _ := New().Struct(data) + + assert.Equal(t, tt.want, ok) + }) + } +} + +func TestCertPEM(t *testing.T) { + tests := []struct { + description string + value string + want bool + }{ + { + description: "failed when the 
cert is empty", + value: "", + want: false, + }, + { + description: "failed when the cert does not have the header", + value: ` +MC4CAQAwBQYDK2VwBCIEIA2Ecxi0E2XsKUNRYBv98VRbpsjl/kD7l7XOa/aKYitU +-----END CERTIFICATE-----`, + want: false, + }, + { + description: "failed when the cert does not have the footer", + value: `-----BEGIN CERTIFICATE----- +MC4CAQAwBQYDK2VwBCIEIA2Ecxi0E2XsKUNRYBv98VRbpsjl/kD7l7XOa/aKYitU +`, + want: false, + }, + { + description: "failed when the cert does not have header neither footer", + value: ` +MC4CAQAwBQYDK2VwBCIEIA2Ecxi0E2XsKUNRYBv98VRbpsjl/kD7l7XOa/aKYitU +`, + want: false, + }, + { + description: "success when the cert is a valid", + value: `-----BEGIN CERTIFICATE----- +MIIFUTCCAzmgAwIBAgIUGOBHWPTiCbwt8iLWYNZwKTDbONUwDQYJKoZIhvcNAQEL +BQAwWzELMAkGA1UEBhMCQlIxDjAMBgNVBAgMBUJhaGlhMRQwEgYDVQQHDAtYaXF1 +ZS1YaXF1ZTEQMA4GA1UECgwHSGVucnknczEUMBIGA1UEAwwLZGVsbGcxNTU1MTAw +HhcNMjQwNTI5MTk1NzA0WhcNMjUwNTI5MTk1NzA0WjARMQ8wDQYDVQQDDAZjbGll +bnQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDt92hrf1PDvCAwNaEv +1xjfL2QCyEsA7zxBGPIIA5ETsB41LW3yS98oy8F/L72BDEepmsw49DaQLbIZJrjX +yT4dtYKN9oPgv5uwwmwPrWexsiDiVA968DOgSWj4S4MIDLAwd3gBqrQgqFutIwgt +58KzhKYn/J9+1q/G8ecKzRre7c7/PQbCHEH4A/XiIudyuSf49ziU+U7dq9rZIAiy +G2xMAKZnjANP0dQj8gaAJCD1qesyoIUXrHCuesrZEEY1gov6ZxUeR62KQgIFJDQ8 +nrGgPRc/AjNcLhLKH5xaRqfbEv3WyYw1Ag4Fc1ZtIOgLbMr9BRcxnrhCAIBD4ASU ++63N5zxC/K0JOPy4iSa8+uMXoYD4eJIUI4e9cuAp976zCsrd6d2QEDZmly2/KGrc +TunlNQ49LfqV9LQWnumRoQ5vhlOHWAQmY48svf45PGeQrrbLUfV24uO4ZzwnCCCH +BUUUwTlasZi1zwHgZ1rmqOjemnGn6HJ9T64tFypUQKOiS5NxeAajszQLf3GfIE8Z +ibE+uxZQyvRexmyUt+RaOQfyAKtnczyOd9LU4/JqVtbKYtuxltw503gS+RuzxcHu +FEv/takSszbr9mKAj/pT0MEKE9nJLP2gcqw0j2fdjfWWejPGwWlxJ98sPlw8eh4K +NOtphFmgbjIUTrjfS6G+3cbOwQIDAQABo1cwVTATBgNVHSUEDDAKBggrBgEFBQcD +AjAdBgNVHQ4EFgQUzvw/tD0WsD5q2K2wSokjLEReY6wwHwYDVR0jBBgwFoAU9Nw4 +MqfdGEeRWXI2H1ChuK2k9qEwDQYJKoZIhvcNAQELBQADggIBAIQp2CQyPjaqbXZc +hiR0VWwAyifttrHJJ59VCFovH4/LW8oPbg8w7JP4bfm9iTbo7yTqDV6BfOWat4Qf 
+T5o0HVcmxKEY7X6bEAmTFfSsNs6NTuaIE8QSFpJpKvLGIjulSqhayjSPuqJavluc +lGa1vUPeIqZAKPDFwrdqMXg/Q7DMhg9su7QPfNVu2E2Hrq++PaXPnWZlu3/yu5FH +2qjoS/xeG8QL8STzqVxqsmcGXkI8FYT2Goidb5eNPSqJflntgm0FzZ/YYvCpZbdC +8/Qjg+CnopfuyLS72iZvW4tSv/9plBsiu6UqhbjBz9xQZbBDpvUOyUvK+L8URmWB +21xTMtqdqk3iG3qAFGnaz0EM0Tg4MEopzYMieob2XoxjSH55ykj33LF/sZeNVPzK +gXi2bqLzL5I1kTPF+Irrg5z7FBTcXRVdPcvqjxGfbyVVmaxNmC26ozIF94rYUOIr +JeUB+pKG1xX/fhUAMeLvEkJ6GOl6ldnTqPJrNAZzwAqW5ra0H9kIbmf1fGPpezaa +KdtGUV3wYjChWAuSa0S3mP1qD+sRNS5NtR7efemmoUbR+hCg2Vyo5osRSJ9dkQJf +PNcoe7LEpZdYQvPI5v1fqVcFpOdOCckDdaGb3XPpd69LGdFD0jHOzF9eIavv9ewV +eiDIAGdPArZi+JWdNsp+TK4MJjcy +-----END CERTIFICATE-----`, + want: true, + }, + } + + for _, tt := range tests { + t.Run(tt.description, func(t *testing.T) { + data := struct { + Cert string `validate:"required,certPEM"` + }{ + Cert: tt.value, + } + + ok, _ := New().Struct(data) + + assert.Equal(t, tt.want, ok) + }) + } +} diff --git a/pkg/websocket/mocks/conn.go b/pkg/websocket/mocks/conn.go new file mode 100644 index 00000000000..6a175156dc6 --- /dev/null +++ b/pkg/websocket/mocks/conn.go @@ -0,0 +1,445 @@ +// Code generated by mockery v2.50.2. DO NOT EDIT. 
+ +package mocks + +import ( + io "io" + + gorillawebsocket "github.com/gorilla/websocket" + + mock "github.com/stretchr/testify/mock" + + net "net" + + time "time" +) + +// Conn is an autogenerated mock type for the Conn type +type Conn struct { + mock.Mock +} + +// Close provides a mock function with no fields +func (_m *Conn) Close() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Close") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// CloseHandler provides a mock function with no fields +func (_m *Conn) CloseHandler() func(int, string) error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for CloseHandler") + } + + var r0 func(int, string) error + if rf, ok := ret.Get(0).(func() func(int, string) error); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(func(int, string) error) + } + } + + return r0 +} + +// EnableWriteCompression provides a mock function with given fields: enable +func (_m *Conn) EnableWriteCompression(enable bool) { + _m.Called(enable) +} + +// LocalAddr provides a mock function with no fields +func (_m *Conn) LocalAddr() net.Addr { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for LocalAddr") + } + + var r0 net.Addr + if rf, ok := ret.Get(0).(func() net.Addr); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(net.Addr) + } + } + + return r0 +} + +// NextReader provides a mock function with no fields +func (_m *Conn) NextReader() (int, io.Reader, error) { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for NextReader") + } + + var r0 int + var r1 io.Reader + var r2 error + if rf, ok := ret.Get(0).(func() (int, io.Reader, error)); ok { + return rf() + } + if rf, ok := ret.Get(0).(func() int); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(int) + } + + if rf, ok := 
ret.Get(1).(func() io.Reader); ok { + r1 = rf() + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(io.Reader) + } + } + + if rf, ok := ret.Get(2).(func() error); ok { + r2 = rf() + } else { + r2 = ret.Error(2) + } + + return r0, r1, r2 +} + +// NextWriter provides a mock function with given fields: messageType +func (_m *Conn) NextWriter(messageType int) (io.WriteCloser, error) { + ret := _m.Called(messageType) + + if len(ret) == 0 { + panic("no return value specified for NextWriter") + } + + var r0 io.WriteCloser + var r1 error + if rf, ok := ret.Get(0).(func(int) (io.WriteCloser, error)); ok { + return rf(messageType) + } + if rf, ok := ret.Get(0).(func(int) io.WriteCloser); ok { + r0 = rf(messageType) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(io.WriteCloser) + } + } + + if rf, ok := ret.Get(1).(func(int) error); ok { + r1 = rf(messageType) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PingHandler provides a mock function with no fields +func (_m *Conn) PingHandler() func(string) error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for PingHandler") + } + + var r0 func(string) error + if rf, ok := ret.Get(0).(func() func(string) error); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(func(string) error) + } + } + + return r0 +} + +// PongHandler provides a mock function with no fields +func (_m *Conn) PongHandler() func(string) error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for PongHandler") + } + + var r0 func(string) error + if rf, ok := ret.Get(0).(func() func(string) error); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(func(string) error) + } + } + + return r0 +} + +// ReadJSON provides a mock function with given fields: _a0 +func (_m *Conn) ReadJSON(_a0 interface{}) error { + ret := _m.Called(_a0) + + if len(ret) == 0 { + panic("no return value specified for ReadJSON") + } + + var r0 error + if rf, ok 
:= ret.Get(0).(func(interface{}) error); ok { + r0 = rf(_a0) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// ReadMessage provides a mock function with no fields +func (_m *Conn) ReadMessage() (int, []byte, error) { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for ReadMessage") + } + + var r0 int + var r1 []byte + var r2 error + if rf, ok := ret.Get(0).(func() (int, []byte, error)); ok { + return rf() + } + if rf, ok := ret.Get(0).(func() int); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(int) + } + + if rf, ok := ret.Get(1).(func() []byte); ok { + r1 = rf() + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).([]byte) + } + } + + if rf, ok := ret.Get(2).(func() error); ok { + r2 = rf() + } else { + r2 = ret.Error(2) + } + + return r0, r1, r2 +} + +// RemoteAddr provides a mock function with no fields +func (_m *Conn) RemoteAddr() net.Addr { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for RemoteAddr") + } + + var r0 net.Addr + if rf, ok := ret.Get(0).(func() net.Addr); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(net.Addr) + } + } + + return r0 +} + +// SetCloseHandler provides a mock function with given fields: h +func (_m *Conn) SetCloseHandler(h func(int, string) error) { + _m.Called(h) +} + +// SetCompressionLevel provides a mock function with given fields: level +func (_m *Conn) SetCompressionLevel(level int) error { + ret := _m.Called(level) + + if len(ret) == 0 { + panic("no return value specified for SetCompressionLevel") + } + + var r0 error + if rf, ok := ret.Get(0).(func(int) error); ok { + r0 = rf(level) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// SetPingHandler provides a mock function with given fields: h +func (_m *Conn) SetPingHandler(h func(string) error) { + _m.Called(h) +} + +// SetPongHandler provides a mock function with given fields: h +func (_m *Conn) SetPongHandler(h func(string) error) { + _m.Called(h) +} + +// 
SetReadDeadline provides a mock function with given fields: t +func (_m *Conn) SetReadDeadline(t time.Time) error { + ret := _m.Called(t) + + if len(ret) == 0 { + panic("no return value specified for SetReadDeadline") + } + + var r0 error + if rf, ok := ret.Get(0).(func(time.Time) error); ok { + r0 = rf(t) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// SetReadLimit provides a mock function with given fields: limit +func (_m *Conn) SetReadLimit(limit int64) { + _m.Called(limit) +} + +// SetWriteDeadline provides a mock function with given fields: t +func (_m *Conn) SetWriteDeadline(t time.Time) error { + ret := _m.Called(t) + + if len(ret) == 0 { + panic("no return value specified for SetWriteDeadline") + } + + var r0 error + if rf, ok := ret.Get(0).(func(time.Time) error); ok { + r0 = rf(t) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Subprotocol provides a mock function with no fields +func (_m *Conn) Subprotocol() string { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Subprotocol") + } + + var r0 string + if rf, ok := ret.Get(0).(func() string); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(string) + } + + return r0 +} + +// UnderlyingConn provides a mock function with no fields +func (_m *Conn) UnderlyingConn() net.Conn { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for UnderlyingConn") + } + + var r0 net.Conn + if rf, ok := ret.Get(0).(func() net.Conn); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(net.Conn) + } + } + + return r0 +} + +// WriteControl provides a mock function with given fields: messageType, data, deadline +func (_m *Conn) WriteControl(messageType int, data []byte, deadline time.Time) error { + ret := _m.Called(messageType, data, deadline) + + if len(ret) == 0 { + panic("no return value specified for WriteControl") + } + + var r0 error + if rf, ok := ret.Get(0).(func(int, []byte, time.Time) error); ok { + r0 = rf(messageType, 
data, deadline) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// WriteMessage provides a mock function with given fields: messageType, data +func (_m *Conn) WriteMessage(messageType int, data []byte) error { + ret := _m.Called(messageType, data) + + if len(ret) == 0 { + panic("no return value specified for WriteMessage") + } + + var r0 error + if rf, ok := ret.Get(0).(func(int, []byte) error); ok { + r0 = rf(messageType, data) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// WritePreparedMessage provides a mock function with given fields: pm +func (_m *Conn) WritePreparedMessage(pm *gorillawebsocket.PreparedMessage) error { + ret := _m.Called(pm) + + if len(ret) == 0 { + panic("no return value specified for WritePreparedMessage") + } + + var r0 error + if rf, ok := ret.Get(0).(func(*gorillawebsocket.PreparedMessage) error); ok { + r0 = rf(pm) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// NewConn creates a new instance of Conn. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewConn(t interface { + mock.TestingT + Cleanup(func()) +}) *Conn { + mock := &Conn{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/pkg/websocket/mocks/upgrader.go b/pkg/websocket/mocks/upgrader.go new file mode 100644 index 00000000000..b17e5f8b484 --- /dev/null +++ b/pkg/websocket/mocks/upgrader.go @@ -0,0 +1,59 @@ +// Code generated by mockery v2.50.2. DO NOT EDIT. 
+ +package mocks + +import ( + http "net/http" + + websocket "github.com/shellhub-io/shellhub/pkg/websocket" + mock "github.com/stretchr/testify/mock" +) + +// Upgrader is an autogenerated mock type for the Upgrader type +type Upgrader struct { + mock.Mock +} + +// Upgrade provides a mock function with given fields: res, req +func (_m *Upgrader) Upgrade(res http.ResponseWriter, req *http.Request) (websocket.Conn, error) { + ret := _m.Called(res, req) + + if len(ret) == 0 { + panic("no return value specified for Upgrade") + } + + var r0 websocket.Conn + var r1 error + if rf, ok := ret.Get(0).(func(http.ResponseWriter, *http.Request) (websocket.Conn, error)); ok { + return rf(res, req) + } + if rf, ok := ret.Get(0).(func(http.ResponseWriter, *http.Request) websocket.Conn); ok { + r0 = rf(res, req) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(websocket.Conn) + } + } + + if rf, ok := ret.Get(1).(func(http.ResponseWriter, *http.Request) error); ok { + r1 = rf(res, req) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// NewUpgrader creates a new instance of Upgrader. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewUpgrader(t interface { + mock.TestingT + Cleanup(func()) +}) *Upgrader { + mock := &Upgrader{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/pkg/websocket/utils.go b/pkg/websocket/utils.go new file mode 100644 index 00000000000..28068619fd0 --- /dev/null +++ b/pkg/websocket/utils.go @@ -0,0 +1,13 @@ +package websocket + +import "github.com/gorilla/websocket" + +// IsErrorCloseNormal returns true if it the error received contains a 1000 as its code, as specified by in RFC 6455, +// section 11.7 for a normal close message. 
+func IsErrorCloseNormal(err error) bool { + return websocket.IsCloseError(err, websocket.CloseNormalClosure) +} + +func IsUnexpectedCloseError(err error) bool { + return websocket.IsUnexpectedCloseError(err) +} diff --git a/pkg/websocket/websocket.go b/pkg/websocket/websocket.go new file mode 100644 index 00000000000..bce41656b00 --- /dev/null +++ b/pkg/websocket/websocket.go @@ -0,0 +1,66 @@ +package websocket + +import ( + "io" + "net" + "net/http" + "time" + + "github.com/gorilla/websocket" +) + +// Conn represents a WebSocket connection. +// +//go:generate mockery --name=Conn --filename=conn.go +type Conn interface { + Close() error + LocalAddr() net.Addr + RemoteAddr() net.Addr + UnderlyingConn() net.Conn + + Subprotocol() string + + NextWriter(messageType int) (io.WriteCloser, error) + WriteMessage(messageType int, data []byte) error + WriteControl(messageType int, data []byte, deadline time.Time) error + WritePreparedMessage(pm *websocket.PreparedMessage) error + SetWriteDeadline(t time.Time) error + EnableWriteCompression(enable bool) + SetCompressionLevel(level int) error + + NextReader() (messageType int, r io.Reader, err error) + ReadMessage() (messageType int, p []byte, err error) + SetReadDeadline(t time.Time) error + SetReadLimit(limit int64) + + SetCloseHandler(h func(code int, text string) error) + CloseHandler() func(code int, text string) error + SetPingHandler(h func(appData string) error) + PingHandler() func(appData string) error + SetPongHandler(h func(appData string) error) + PongHandler() func(appData string) error + + ReadJSON(any) error +} + +// Upgrader should be implemented by structures that want to be able to convert an HTTP request into WebSocket connection. +// +//go:generate mockery --name=Upgrader --filename=upgrader.go +type Upgrader interface { + Upgrade(res http.ResponseWriter, req *http.Request) (Conn, error) +} + +// GorillaUpgrader implements [Upgrader] using Gorilla's WebSocket implementation. 
+type GorillaUpgrader struct { + upgrader *websocket.Upgrader +} + +func (u *GorillaUpgrader) Upgrade(res http.ResponseWriter, req *http.Request) (Conn, error) { + return u.upgrader.Upgrade(res, req, nil) +} + +func NewGorillaWebSocketUpgrader() Upgrader { + return &GorillaUpgrader{ + upgrader: new(websocket.Upgrader), + } +} diff --git a/pkg/worker/asynq/client.go b/pkg/worker/asynq/client.go new file mode 100644 index 00000000000..901c621be2b --- /dev/null +++ b/pkg/worker/asynq/client.go @@ -0,0 +1,57 @@ +package asynq + +import ( + "context" + + "github.com/hibiken/asynq" + "github.com/shellhub-io/shellhub/pkg/worker" +) + +type client struct { + asynqClient *asynq.Client +} + +func NewClient(redisURI string) (worker.Client, error) { + opt, err := asynq.ParseRedisURI(redisURI) + if err != nil { + return nil, err + } + + c := &client{asynqClient: asynq.NewClient(opt)} + + if c.asynqClient == nil { + return nil, worker.ErrClientStartFailed + } + + return c, nil +} + +func (c *client) Close() error { + return c.asynqClient.Close() +} + +func (c *client) Submit(ctx context.Context, pattern worker.TaskPattern, payload []byte) error { + if !pattern.Validate() { + return worker.ErrTaskPatternInvalid + } + + task := asynq.NewTask(pattern.String(), payload) + if _, err := c.asynqClient.EnqueueContext(ctx, task, asynq.Queue(pattern.Queue())); err != nil { + return worker.ErrSubmitFailed + } + + return nil +} + +func (c *client) SubmitToBatch(ctx context.Context, pattern worker.TaskPattern, payload []byte) error { + if !pattern.Validate() { + return worker.ErrTaskPatternInvalid + } + + task := asynq.NewTask(pattern.String(), payload) + if _, err := c.asynqClient.EnqueueContext(ctx, task, asynq.Queue(pattern.Queue()), asynq.Group(pattern.String())); err != nil { + return worker.ErrSubmitFailed + } + + return nil +} diff --git a/pkg/worker/asynq/client_test.go b/pkg/worker/asynq/client_test.go new file mode 100644 index 00000000000..ba139c1b39f --- /dev/null +++ 
b/pkg/worker/asynq/client_test.go @@ -0,0 +1,65 @@ +package asynq_test + +import ( + "context" + "runtime" + "testing" + "time" + + asynqlib "github.com/hibiken/asynq" + "github.com/shellhub-io/shellhub/pkg/envs" + "github.com/shellhub-io/shellhub/pkg/worker/asynq" + "github.com/stretchr/testify/require" + "github.com/testcontainers/testcontainers-go/modules/redis" +) + +func TestClient(t *testing.T) { + t.Parallel() + ctx := context.Background() + + image := "docker.io/redis:7" + if envs.DefaultBackend.Get("CI") == "true" { + image = "registry.infra.ossystems.io/cache/redis:7" + } + + redisContainer, err := redis.Run(ctx, image) + require.NoError(t, err) + + t.Cleanup(func() { + require.NoError(t, redisContainer.Terminate(ctx)) + }) + + redisConnStr, err := redisContainer.ConnectionString(ctx) + require.NoError(t, err) + + // Setup server and handlers + addr, err := asynqlib.ParseRedisURI(redisConnStr) + require.NoError(t, err) + + asynqMux := asynqlib.NewServeMux() + asynqSrv := asynqlib.NewServer( + addr, + asynqlib.Config{ //nolint:exhaustruct + Concurrency: runtime.NumCPU(), + Queues: map[string]int{"queue": 1}, + }, + ) + + assertTaskPayload := "" + asynqMux.HandleFunc("queue:kind", func(_ context.Context, t *asynqlib.Task) error { + assertTaskPayload = string(t.Payload()) + + return nil + }) + + require.NoError(t, asynqSrv.Start(asynqMux)) + + // Setup client + client, err := asynq.NewClient(redisConnStr) + require.NoError(t, err) + defer client.Close() + + require.NoError(t, client.Submit(ctx, "queue:kind", []byte("task was called"))) + time.Sleep(10 * time.Second) + require.Equal(t, "task was called", assertTaskPayload) +} diff --git a/pkg/worker/asynq/cron.go b/pkg/worker/asynq/cron.go new file mode 100644 index 00000000000..58205e93ba5 --- /dev/null +++ b/pkg/worker/asynq/cron.go @@ -0,0 +1,14 @@ +package asynq + +import ( + "github.com/shellhub-io/shellhub/pkg/worker" +) + +// Unique configures a cron job to prevent concurrent processing. 
+// When enabled, the job will not be enqueued or executed again until it completes +// or the timeout specified in `SHELLHUB_ASYNQ_UNIQUENESS_TIMEOUT` is reached. +func Unique() worker.CronjobOption { + return func(c *worker.Cronjob) { + c.Unique = true + } +} diff --git a/pkg/worker/asynq/server.go b/pkg/worker/asynq/server.go new file mode 100644 index 00000000000..839c4002384 --- /dev/null +++ b/pkg/worker/asynq/server.go @@ -0,0 +1,162 @@ +package asynq + +import ( + "runtime" + "time" + + "github.com/hibiken/asynq" + "github.com/shellhub-io/shellhub/pkg/uuid" + "github.com/shellhub-io/shellhub/pkg/worker" +) + +type ServerOption func(s *server) error + +// BatchConfig sets the batch configuration of the server. It's required when +// setting a task with [BatchTask] option. +// +// maxSize is the maximum number of tasks that a batch task can handle before +// processing. +// +// maxDelay is the maximum amount of time that a batch task can wait before +// processing. +// +// gracePeriod is the amount of time that the server will wait before aggregating +// batch tasks. +func BatchConfig(maxSize, maxDelay, gracePeriod int) ServerOption { + return func(s *server) error { + s.batchConfig.maxSize = maxSize + s.batchConfig.maxDelay = time.Second * time.Duration(maxDelay) + s.batchConfig.gracePeriod = time.Second * time.Duration(gracePeriod) + + return nil + } +} + +// UniquenessTimeout defines the maximum duration, in hours, for which a unique job remains locked +// in the queue. If the job does not complete within this timeout, the lock is released, allowing +// a new instance of the job to be enqueued and executed. 
+func UniquenessTimeout(timeout int) ServerOption { + return func(s *server) error { + s.uniquenessTimeout = timeout + + return nil + } +} + +type server struct { + redisURI string + asynqSrv *asynq.Server + asynqMux *asynq.ServeMux + asynqSch *asynq.Scheduler + batchConfig *batchConfig + uniquenessTimeout int + + queues queues + tasks []worker.Task + cronjobs []worker.Cronjob +} + +func NewServer(redisURI string, opts ...ServerOption) worker.Server { + s := &server{ + redisURI: redisURI, + queues: queues{cronQueue: 1}, + tasks: []worker.Task{}, + cronjobs: []worker.Cronjob{}, + batchConfig: &batchConfig{}, + } + + for _, opt := range opts { + if err := opt(s); err != nil { + return nil // NOTE: currently all opts returns nil + } + } + + return s +} + +func (s *server) HandleTask(pattern worker.TaskPattern, handler worker.TaskHandler, opts ...worker.TaskOption) { + pattern.MustValidate() + + if _, ok := s.queues[pattern.Queue()]; !ok { + s.queues[pattern.Queue()] = 1 + } + + task := worker.Task{Pattern: pattern, Handler: handler} + for _, opt := range opts { + opt(&task) + } + + s.tasks = append(s.tasks, task) +} + +func (s *server) HandleCron(spec worker.CronSpec, handler worker.CronHandler, opts ...worker.CronjobOption) { + spec.MustValidate() + + cronjob := worker.Cronjob{ + Identifier: uuid.Generate(), + Spec: spec, + Handler: handler, + } + + for _, opt := range opts { + opt(&cronjob) + } + + s.cronjobs = append(s.cronjobs, cronjob) +} + +func (s *server) Start() error { + if err := s.setupAsynq(); err != nil { + return err + } + + if err := s.asynqSrv.Start(s.asynqMux); err != nil { + return worker.ErrServerStartFailed + } + + if err := s.asynqSch.Start(); err != nil { + return worker.ErrServerStartFailed + } + + return nil +} + +func (s *server) Shutdown() { + s.asynqSrv.Shutdown() + s.asynqSch.Shutdown() +} + +func (s *server) setupAsynq() error { + addr, err := asynq.ParseRedisURI(s.redisURI) + if err != nil { + return err + } + + s.asynqSch = 
asynq.NewScheduler(addr, nil) + s.asynqMux = asynq.NewServeMux() + s.asynqSrv = asynq.NewServer( + addr, + asynq.Config{ //nolint:exhaustruct + Concurrency: runtime.NumCPU(), + Queues: s.queues, + GroupAggregator: asynq.GroupAggregatorFunc(aggregate), + GroupMaxSize: s.batchConfig.maxSize, + GroupMaxDelay: s.batchConfig.maxDelay, + GroupGracePeriod: s.batchConfig.gracePeriod, + }, + ) + + for _, t := range s.tasks { + s.asynqMux.HandleFunc(t.Pattern.String(), taskToAsynq(t.Handler)) + } + + for _, c := range s.cronjobs { + s.asynqMux.HandleFunc(c.Identifier, cronToAsynq(c.Handler)) + task := asynq.NewTask(c.Identifier, nil, asynq.Queue(cronQueue)) + if _, err := s.asynqSch.Register(c.Spec.String(), task, buildCronOptions(s, &c)...); err != nil { //nolint:gosec + return worker.ErrHandleCronFailed + } + } + + return nil +} diff --git a/pkg/worker/asynq/server_test.go b/pkg/worker/asynq/server_test.go new file mode 100644 index 00000000000..7b633cd4266 --- /dev/null +++ b/pkg/worker/asynq/server_test.go @@ -0,0 +1,65 @@ +package asynq_test + +import ( + "context" + "testing" + "time" + + asynqlib "github.com/hibiken/asynq" + "github.com/shellhub-io/shellhub/pkg/envs" + "github.com/shellhub-io/shellhub/pkg/worker/asynq" + "github.com/stretchr/testify/require" + "github.com/testcontainers/testcontainers-go/modules/redis" +) + +func TestServer(t *testing.T) { + t.Parallel() + ctx := context.Background() + + image := "docker.io/redis:7" + if envs.DefaultBackend.Get("CI") == "true" { + image = "registry.infra.ossystems.io/cache/redis:7" + } + + redisContainer, err := redis.Run(ctx, image) + require.NoError(t, err) + + t.Cleanup(func() { + require.NoError(t, redisContainer.Terminate(ctx)) + }) + + redisConnStr, err := redisContainer.ConnectionString(ctx) + require.NoError(t, err) + + // Setup server and handlers + srv := asynq.NewServer(redisConnStr, asynq.BatchConfig(2, 1, 1)) + defer srv.Shutdown() + + assertTaskPayload := "" + srv.HandleTask("queue:task", func(_ 
context.Context, payload []byte) error { + assertTaskPayload = string(payload) + + return nil + }) + + assertCronPayload := "" + srv.HandleCron("* * * * *", func(_ context.Context) error { + assertCronPayload = "cron was called" + + return nil + }) + + require.NoError(t, srv.Start()) + + // Setup asynq client and enqueue task + opt, err := asynqlib.ParseRedisURI(redisConnStr) + require.NoError(t, err) + asynqClient := asynqlib.NewClient(opt) + _, err = asynqClient.Enqueue(asynqlib.NewTask("queue:task", []byte("task was called")), asynqlib.Queue("queue")) + require.NoError(t, err) + + // Assert that tasks was called. We sleep for 1 minute to wait the server process the cronjob + time.Sleep(1 * time.Minute) + require.Equal(t, assertTaskPayload, "task was called") + require.Equal(t, assertCronPayload, "cron was called") +} diff --git a/pkg/worker/asynq/task.go b/pkg/worker/asynq/task.go new file mode 100644 index 00000000000..48c51d859b8 --- /dev/null +++ b/pkg/worker/asynq/task.go @@ -0,0 +1,22 @@ +package asynq + +import ( + "github.com/shellhub-io/shellhub/pkg/worker" +) + +// BatchTask configures a task to process a list of tasks in batches. +// Each task payload will be aggregated, separated by '\n'. Example: +// +// func(ctx context.Context, payload []byte) error { +// scanner := bufio.NewScanner(bytes.NewReader(payload)) +// scanner.Split(bufio.ScanLines) +// +// for scanner.Scan() { +// // Process each task payload +// } +// } +func BatchTask() worker.TaskOption { + return func(t *worker.Task) { + t.Pattern += ":batch" + } +} diff --git a/pkg/worker/asynq/utils.go b/pkg/worker/asynq/utils.go new file mode 100644 index 00000000000..45189c2bb4b --- /dev/null +++ b/pkg/worker/asynq/utils.go @@ -0,0 +1,67 @@ +package asynq + +import ( + "bytes" + "context" + "time" + + "github.com/hibiken/asynq" + "github.com/shellhub-io/shellhub/pkg/worker" +) + +// batchConfig configures the asynq batch settings. 
+type batchConfig struct { + // maxSize is the maximum number of tasks that a batch task can handle before + // processing. + maxSize int + // maxDelay is the maximum amount of time that a batch task can wait before + // processing. + maxDelay time.Duration + // gracePeriod is the amount of time that the server will wait before aggregating + // batch tasks. + gracePeriod time.Duration +} + +// queues is a map of queues where the key is the name and the value is the priority. +type queues map[string]int + +// cronQueue is the queue where's all the cronjobs will send tasks. +const cronQueue = "cron" + +// aggregate is the handler that Asynq will execute to aggregate the tasks. +// It will combine all task payloads into one, separated by '\n', and then +// execute a new task with the name "{group}+:aggregated". +func aggregate(group string, tasks []*asynq.Task) *asynq.Task { + buf := new(bytes.Buffer) + for _, t := range tasks { + buf.Write(t.Payload()) + buf.WriteByte('\n') + } + + return asynq.NewTask(group+":batch", buf.Bytes()) +} + +// cronToAsynq converts a [github.com/shellhub-io/shellhub/pkg/api/worker.CronHandler] to an asynq handler. +func cronToAsynq(h worker.CronHandler) func(context.Context, *asynq.Task) error { + return func(ctx context.Context, _ *asynq.Task) error { + return h(ctx) + } +} + +// buildCronOptions generates a slice of asynq.Options for configuring a cron job. +func buildCronOptions(s *server, c *worker.Cronjob) []asynq.Option { + opts := make([]asynq.Option, 0) + + if c.Unique && s.uniquenessTimeout > 0 { + opts = append(opts, asynq.Unique(time.Duration(s.uniquenessTimeout)*time.Hour)) + } + + return opts +} + +// taskToAsynq converts a [github.com/shellhub-io/shellhub/pkg/api/worker.TaskHandler] to an asynq handler. 
+func taskToAsynq(h worker.TaskHandler) func(context.Context, *asynq.Task) error { + return func(ctx context.Context, task *asynq.Task) error { + return h(ctx, task.Payload()) + } +} diff --git a/pkg/worker/client.go b/pkg/worker/client.go new file mode 100644 index 00000000000..e4569524904 --- /dev/null +++ b/pkg/worker/client.go @@ -0,0 +1,22 @@ +package worker + +import ( + "context" +) + +// Client represents a client that submits tasks to be handled by the server. +type Client interface { + // Submit sends a payload to be processed by the task handler registered with the specified pattern. + // The task will be executed immediately if it matches the pattern. + // + // It returns an error if the pattern is invalid or if there is an issue submitting the task. + Submit(ctx context.Context, pattern TaskPattern, payload []byte) error + // SubmitToBatch sends a payload to be added to a batch for processing. The task handler registered with + // the specified pattern will process the batch either when a series of payloads have been enqueued + // or when the specified time delay is reached. + // + // It returns an error if the pattern is invalid or if there is an issue submitting the task to the batch. + SubmitToBatch(ctx context.Context, pattern TaskPattern, payload []byte) error + // Close closes the client's connection. + Close() error +} diff --git a/pkg/worker/cron.go b/pkg/worker/cron.go new file mode 100644 index 00000000000..73a2c35c904 --- /dev/null +++ b/pkg/worker/cron.go @@ -0,0 +1,45 @@ +package worker + +import ( + "context" + + "github.com/adhocore/gronx" +) + +type CronSpec string + +func (cs CronSpec) String() string { + return string(cs) +} + +// _gron is only used to validate cron expressions. It's initialized outside of the +// checker to avoid multiples allocs of the struct. +var _gron = gronx.New() + +// Validate reports whether the pattern is valid or not. 
+func (cs CronSpec) Validate() bool { + return _gron.IsValid(cs.String()) +} + +// MustValidate is similar to [CronSpec.Validate] but panics when invalid. +func (cs CronSpec) MustValidate() { + if !cs.Validate() { + panic("invalid cron specification: " + cs) + } +} + +type CronHandler func(ctx context.Context) error + +type Cronjob struct { + // Identifier is a UUID for the cron job, used internally to register the task with the + // scheduler. + Identifier string + // Spec is the cron expression that defines the schedule for the cron job. + Spec CronSpec + // Handler is the callback function that will be executed when the cron specification is met. + Handler CronHandler + // Unique defines whether the task cannot be perfomed concurrently. + Unique bool +} + +type CronjobOption func(c *Cronjob) diff --git a/pkg/worker/errors.go b/pkg/worker/errors.go new file mode 100644 index 00000000000..0b485b3f667 --- /dev/null +++ b/pkg/worker/errors.go @@ -0,0 +1,12 @@ +package worker + +import "errors" + +var ( + ErrHandleCronFailed = errors.New("failed to handle cron") + ErrServerStartFailed = errors.New("failed to start the worker server") + ErrClientStartFailed = errors.New("failed to start the worker client") + ErrTaskPatternInvalid = errors.New("task pattern is invalid") + ErrCronSpecInvalid = errors.New("cron specification is invalid") + ErrSubmitFailed = errors.New("failed to submit the payload") +) diff --git a/pkg/worker/mocks/client.go b/pkg/worker/mocks/client.go new file mode 100644 index 00000000000..e75b9ea1ff9 --- /dev/null +++ b/pkg/worker/mocks/client.go @@ -0,0 +1,83 @@ +// Code generated by mockery v2.43.1. DO NOT EDIT. 
+ +package mocks + +import ( + context "context" + + worker "github.com/shellhub-io/shellhub/pkg/worker" + mock "github.com/stretchr/testify/mock" +) + +// Client is an autogenerated mock type for the Client type +type Client struct { + mock.Mock +} + +// Close provides a mock function with given fields: +func (_m *Client) Close() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Close") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Submit provides a mock function with given fields: ctx, pattern, payload +func (_m *Client) Submit(ctx context.Context, pattern worker.TaskPattern, payload []byte) error { + ret := _m.Called(ctx, pattern, payload) + + if len(ret) == 0 { + panic("no return value specified for Submit") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, worker.TaskPattern, []byte) error); ok { + r0 = rf(ctx, pattern, payload) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// SubmitToBatch provides a mock function with given fields: ctx, pattern, payload +func (_m *Client) SubmitToBatch(ctx context.Context, pattern worker.TaskPattern, payload []byte) error { + ret := _m.Called(ctx, pattern, payload) + + if len(ret) == 0 { + panic("no return value specified for SubmitToBatch") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, worker.TaskPattern, []byte) error); ok { + r0 = rf(ctx, pattern, payload) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// NewClient creates a new instance of Client. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewClient(t interface { + mock.TestingT + Cleanup(func()) +}) *Client { + mock := &Client{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/pkg/worker/mocks/server.go b/pkg/worker/mocks/server.go new file mode 100644 index 00000000000..6e3a99e219c --- /dev/null +++ b/pkg/worker/mocks/server.go @@ -0,0 +1,67 @@ +// Code generated by mockery v2.43.1. DO NOT EDIT. + +package mocks + +import ( + worker "github.com/shellhub-io/shellhub/pkg/worker" + mock "github.com/stretchr/testify/mock" +) + +// Server is an autogenerated mock type for the Server type +type Server struct { + mock.Mock +} + +// HandleCron provides a mock function with given fields: spec, cronFunc +func (_m *Server) HandleCron(spec worker.CronSpec, cronFunc worker.CronHandler) { + _m.Called(spec, cronFunc) +} + +// HandleTask provides a mock function with given fields: pattern, cb, opts +func (_m *Server) HandleTask(pattern worker.TaskPattern, cb worker.TaskHandler, opts ...worker.TaskOption) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, pattern, cb) + _ca = append(_ca, _va...) + _m.Called(_ca...) +} + +// Shutdown provides a mock function with given fields: +func (_m *Server) Shutdown() { + _m.Called() +} + +// Start provides a mock function with given fields: +func (_m *Server) Start() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Start") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// NewServer creates a new instance of Server. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewServer(t interface { + mock.TestingT + Cleanup(func()) +}) *Server { + mock := &Server{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/pkg/worker/server.go b/pkg/worker/server.go new file mode 100644 index 00000000000..9fb324b1a76 --- /dev/null +++ b/pkg/worker/server.go @@ -0,0 +1,23 @@ +package worker + +// Server represents a process that handles tasks and cronjobs. A [Client] can submit +// tasks. +type Server interface { + // HandleTask registers a task with the provided pattern. The task will be executed + // every time a client enqueues a payload matching the pattern. Batch tasks will be executed + // when the client enqueues a series of payloads or when the specified time delay is reached. + // + // It panics if the pattern is invalid. Batch tasks are specified in options. + HandleTask(pattern TaskPattern, cb TaskHandler, opts ...TaskOption) + // HandleCron executes the cronFunc every time the cron specification is met. + // + // It panics if the cron specification is invalid. + HandleCron(spec CronSpec, cronFunc CronHandler, opts ...CronjobOption) + // Start initializes and starts the worker in a non-blocking manner. The server is + // turned off when the context is done. + // + // It returns an error if any issues occur during the startup process. + Start() error + // Shutdown gracefully shuts down the server. + Shutdown() +} diff --git a/pkg/worker/task.go b/pkg/worker/task.go new file mode 100644 index 00000000000..91e59a96d21 --- /dev/null +++ b/pkg/worker/task.go @@ -0,0 +1,42 @@ +package worker + +import ( + "context" + "strings" +) + +// TaskPattern represents a pattern to which a task can register to listen. +// It must be in the format "queue:kind". +type TaskPattern string + +func (tp TaskPattern) String() string { + return string(tp) +} + +// Validate reports whether the pattern is valid or not. 
+func (tp TaskPattern) Validate() bool { + return len(strings.Split(string(tp), ":")) == 2 +} + +// MustValidate is similar to [TaskPattern.Validate] but panics when invalid. +func (tp TaskPattern) MustValidate() { + if !tp.Validate() { + panic("invalid task pattern: " + tp) + } +} + +// Queue returns the queue component of the pattern. +func (tp TaskPattern) Queue() string { + return strings.Split(string(tp), ":")[0] +} + +type TaskHandler func(ctx context.Context, payload []byte) error + +type Task struct { + // Pattern is a string to which the task can listen to message/events. + Pattern TaskPattern + // Handler is the callback that the task will execute when receiving messages/events. + Handler TaskHandler +} + +type TaskOption func(t *Task) diff --git a/pkg/wsconnadapter/wsconnadapter.go b/pkg/wsconnadapter/wsconnadapter.go index 19ab06b57ca..e43c481ea97 100644 --- a/pkg/wsconnadapter/wsconnadapter.go +++ b/pkg/wsconnadapter/wsconnadapter.go @@ -4,11 +4,13 @@ import ( "errors" "io" "net" + "os" "sync" "time" "github.com/gorilla/websocket" - "github.com/sirupsen/logrus" + "github.com/shellhub-io/shellhub/pkg/clock" + log "github.com/sirupsen/logrus" ) // an adapter for representing WebSocket connection as a net.Conn @@ -22,17 +24,48 @@ const ( ) type Adapter struct { + UUID string conn *websocket.Conn readMutex sync.Mutex writeMutex sync.Mutex reader io.Reader stopPingCh chan struct{} pongCh chan bool + Logger *log.Entry + CreatedAt time.Time } -func New(conn *websocket.Conn) *Adapter { +type Option func(*Adapter) + +func WithID(id string) Option { + return func(a *Adapter) { + a.UUID = id + } +} + +func WithDevice(tenant string, device string) Option { + return func(a *Adapter) { + a.Logger = a.Logger.WithFields(log.Fields{ + "tenant": tenant, + "device": device, + }) + } +} + +func New(conn *websocket.Conn, options ...Option) *Adapter { adapter := &Adapter{ conn: conn, + Logger: log.NewEntry(&log.Logger{ + Out: os.Stderr, + Formatter: 
log.StandardLogger().Formatter, + Hooks: log.StandardLogger().Hooks, + Level: log.StandardLogger().Level, + }), + CreatedAt: clock.Now(), + } + + for _, option := range options { + option(adapter) } return adapter @@ -40,6 +73,8 @@ func New(conn *websocket.Conn) *Adapter { func (a *Adapter) Ping() chan bool { if a.pongCh != nil { + a.Logger.Debug("pong channel is not null") + return a.pongCh } @@ -47,15 +82,19 @@ func (a *Adapter) Ping() chan bool { a.pongCh = make(chan bool) timeout := time.AfterFunc(pongTimeout, func() { + a.Logger.Debug("close connection due pong timeout") + _ = a.Close() }) - a.conn.SetPongHandler(func(data string) error { + a.conn.SetPongHandler(func(_ string) error { timeout.Reset(pongTimeout) + a.Logger.Trace("pong timeout") // non-blocking channel write select { case a.pongCh <- true: + a.Logger.Trace("write true to pong channel") default: } @@ -71,9 +110,11 @@ func (a *Adapter) Ping() chan bool { select { case <-ticker.C: if err := a.conn.WriteControl(websocket.PingMessage, []byte{}, time.Now().Add(5*time.Second)); err != nil { - logrus.WithError(err).Error("Failed to write ping message") + a.Logger.WithError(err).Error("failed to write ping message") } case <-a.stopPingCh: + a.Logger.Debug("stop ping message received") + return } } @@ -112,6 +153,10 @@ func (a *Adapter) Read(b []byte) (int, error) { } } + a.Logger.WithError(err). + WithField("bytes", bytesRead). + Trace("bytes read from wsconnadapter") + return bytesRead, err } @@ -121,22 +166,31 @@ func (a *Adapter) Write(b []byte) (int, error) { nextWriter, err := a.conn.NextWriter(websocket.BinaryMessage) if err != nil { + a.Logger.WithError(err).Trace("failed to get the next writer") + return 0, err } bytesWritten, err := nextWriter.Write(b) nextWriter.Close() + a.Logger.WithError(err). + WithField("bytes", bytesWritten). 
+ Trace("bytes written from wsconnadapter") + return bytesWritten, err } func (a *Adapter) Close() error { select { case <-a.stopPingCh: + a.Logger.Debug("stop ping message received") default: if a.stopPingCh != nil { a.stopPingCh <- struct{}{} close(a.stopPingCh) + + a.Logger.Debug("stop ping channel closed") } } @@ -153,6 +207,8 @@ func (a *Adapter) RemoteAddr() net.Addr { func (a *Adapter) SetDeadline(t time.Time) error { if err := a.SetReadDeadline(t); err != nil { + a.Logger.WithError(err).Trace("failed to set the deadline") + return err } diff --git a/ssh/.air.toml b/ssh/.air.toml new file mode 100644 index 00000000000..c59fdac1dfc --- /dev/null +++ b/ssh/.air.toml @@ -0,0 +1,32 @@ +root = "../" +tmp_dir = "tmp" + +[build] +pre_cmd = [] +cmd = "go build -gcflags=\"all=-N -l\" -o ./tmp/main ." +post_cmd = [] +bin = "" +full_bin = "dlv exec ./tmp/main" +args_bin = [ + "--listen=0.0.0.0:2345", + "--headless", + "--continue", + "--accept-multiclient", +] +delay = 500 +exclude_dir = ["assets", "tmp", "vendor", "testdata"] +exclude_file = [] +exclude_regex = ["_test.go"] +exclude_unchanged = false +follow_symlink = false +include_dir = [] +include_ext = ["go", "tpl", "tmpl", "html"] +include_file = [] +kill_delay = "0s" +log = "build-errors.log" +poll = false +poll_interval = 0 +rerun = false +rerun_delay = 500 +send_interrupt = false +stop_on_error = false diff --git a/ssh/Dockerfile b/ssh/Dockerfile index 24bb2694f7b..25da247ff4a 100644 --- a/ssh/Dockerfile +++ b/ssh/Dockerfile @@ -1,5 +1,5 @@ # base stage -FROM golang:1.20.4-alpine3.16 AS base +FROM golang:1.25-alpine3.22 AS base ARG GOPROXY @@ -40,8 +40,9 @@ ARG GOPROXY ENV GOPROXY ${GOPROXY} RUN apk add --update openssl -RUN go install github.com/markbates/refresh@v1.11.1 && \ - go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.53.3 && \ +RUN go install github.com/air-verse/air@v1.62 && \ + go install github.com/go-delve/delve/cmd/dlv@v1.25 && \ + go install 
github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.1.6 && \ go install github.com/vektra/mockery/v2/...@v2.20.0 WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub @@ -55,7 +56,7 @@ WORKDIR $GOPATH/src/github.com/shellhub-io/shellhub/ssh ENTRYPOINT ["/entrypoint.sh"] # production stage -FROM alpine:3.19.0 AS production +FROM alpine:3.23.2 AS production RUN apk add curl @@ -63,4 +64,4 @@ RUN apk add --update openssh-client COPY --from=builder /go/src/github.com/shellhub-io/shellhub/ssh/ssh /ssh -ENTRYPOINT /ssh +ENTRYPOINT ["/ssh"] diff --git a/ssh/entrypoint-dev.sh b/ssh/entrypoint-dev.sh index 5d9569a0444..3b99b01910d 100755 --- a/ssh/entrypoint-dev.sh +++ b/ssh/entrypoint-dev.sh @@ -6,7 +6,7 @@ mkdir -p /var/run/secrets if [ ! -f /var/run/secrets/ssh_private_key ]; then echo "Generating private key" - openssl genrsa -out /var/run/secrets/ssh_private_key 2048 + openssl genpkey -algorithm RSA -out /var/run/secrets/ssh_private_key -pkeyopt rsa_keygen_bits:2048 fi -refresh run +air diff --git a/ssh/go.mod b/ssh/go.mod index b588523aaaf..aca92d43955 100644 --- a/ssh/go.mod +++ b/ssh/go.mod @@ -1,58 +1,70 @@ module github.com/shellhub-io/shellhub/ssh -go 1.20 +go 1.24.9 require ( github.com/Masterminds/semver v1.5.0 - github.com/gliderlabs/ssh v0.3.6 - github.com/go-resty/resty/v2 v2.11.0 - github.com/golang-jwt/jwt v3.2.2+incompatible - github.com/labstack/echo-contrib v0.15.0 - github.com/labstack/echo/v4 v4.11.4 - github.com/pires/go-proxyproto v0.7.0 + github.com/gliderlabs/ssh v0.3.8 + github.com/golang-jwt/jwt/v5 v5.3.0 + github.com/gorilla/websocket v1.5.3 + github.com/hashicorp/yamux v0.1.2 + github.com/labstack/echo-contrib v0.17.4 + github.com/labstack/echo/v4 v4.15.0 + github.com/multiformats/go-multistream v0.6.1 + github.com/pires/go-proxyproto v0.9.2 github.com/shellhub-io/shellhub v0.13.4 - github.com/sirupsen/logrus v1.9.3 - github.com/stretchr/testify v1.8.4 - golang.org/x/crypto v0.18.0 - golang.org/x/net v0.20.0 + github.com/sirupsen/logrus 
v1.9.4 + github.com/stretchr/testify v1.11.1 + golang.org/x/crypto v0.47.0 + golang.org/x/net v0.49.0 + golang.org/x/time v0.14.0 ) require ( + github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 // indirect + github.com/Microsoft/hcsshim v0.12.2 // indirect + github.com/adhocore/gronx v1.8.1 // indirect github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be // indirect - github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect + github.com/go-ole/go-ole v1.3.0 // indirect github.com/go-playground/locales v0.14.1 // indirect github.com/go-playground/universal-translator v0.18.1 // indirect github.com/go-playground/validator/v10 v10.11.2 // indirect github.com/go-redis/cache/v8 v8.4.4 // indirect github.com/go-redis/redis/v8 v8.11.5 // indirect - github.com/golang-jwt/jwt/v4 v4.5.0 // indirect - github.com/golang/protobuf v1.5.2 // indirect - github.com/google/uuid v1.2.0 // indirect - github.com/gorilla/websocket v1.5.0 // indirect + github.com/go-resty/resty/v2 v2.11.0 // indirect + github.com/golang/protobuf v1.5.4 // indirect + github.com/google/go-cmp v0.6.0 // indirect + github.com/google/uuid v1.6.0 // indirect github.com/hibiken/asynq v0.24.1 // indirect - github.com/klauspost/compress v1.16.0 // indirect + github.com/klauspost/compress v1.18.0 // indirect github.com/labstack/gommon v0.4.2 // indirect github.com/leodido/go-urn v1.2.2 // indirect - github.com/mattn/go-colorable v0.1.13 // indirect + github.com/lufia/plan9stats v0.0.0-20240408141607-282e7b5d6b74 // indirect + github.com/mattn/go-colorable v0.1.14 // indirect github.com/mattn/go-isatty v0.0.20 // indirect + github.com/multiformats/go-varint v0.0.6 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect 
github.com/redis/go-redis/v9 v9.0.3 // indirect github.com/robfig/cron/v3 v3.0.1 // indirect github.com/sethvargo/go-envconfig v0.9.0 // indirect + github.com/shirou/gopsutil/v3 v3.24.3 // indirect github.com/spf13/cast v1.3.1 // indirect - github.com/stretchr/objx v0.5.0 // indirect + github.com/stretchr/objx v0.5.2 // indirect + github.com/tklauser/go-sysconf v0.3.13 // indirect github.com/valyala/bytebufferpool v1.0.0 // indirect github.com/valyala/fasttemplate v1.2.2 // indirect github.com/vmihailenco/go-tinylfu v0.2.2 // indirect github.com/vmihailenco/msgpack/v5 v5.3.5 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/sync v0.1.0 // indirect - golang.org/x/sys v0.16.0 // indirect - golang.org/x/text v0.14.0 // indirect - golang.org/x/time v0.5.0 // indirect - google.golang.org/protobuf v1.28.1 // indirect + go.uber.org/goleak v1.3.0 // indirect + golang.org/x/sync v0.19.0 // indirect + golang.org/x/sys v0.40.0 // indirect + golang.org/x/text v0.33.0 // indirect + google.golang.org/protobuf v1.36.6 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/ssh/go.sum b/ssh/go.sum index 3e68f62bfe5..3517ba147ff 100644 --- a/ssh/go.sum +++ b/ssh/go.sum @@ -1,27 +1,66 @@ +dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= +dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= +github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25UVaW/CKtUDjefjrs0SPonmDGUVOYP0= +github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww= github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= +github.com/Microsoft/hcsshim 
v0.12.2 h1:AcXy+yfRvrx20g9v7qYaJv5Rh+8GaHOS6b8G6Wx/nKs= +github.com/Microsoft/hcsshim v0.12.2/go.mod h1:RZV12pcHCXQ42XnlQ3pz6FZfmrC1C+R4gaOHhRNML1g= +github.com/adhocore/gronx v1.8.1 h1:F2mLTG5sB11z7vplwD4iydz3YCEjstSfYmCrdSm3t6A= +github.com/adhocore/gronx v1.8.1/go.mod h1:7oUY1WAU8rEJWmAxXR2DN0JaO4gi9khSgKjiRypqteg= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= github.com/bsm/ginkgo/v2 v2.7.0 h1:ItPMPH90RbmZJt5GtkcNvIRuGEdwlBItdNVoyzaNQao= github.com/bsm/ginkgo/v2 v2.7.0/go.mod h1:AiKlXPm7ItEHNc/2+OkrNG4E0ITzojb9/xWzvQ9XZ9w= github.com/bsm/gomega v1.26.0 h1:LhQm+AFcgV2M0WyKroMASzAzCAJVpAxQXv4SaI9a69Y= github.com/bsm/gomega v1.26.0/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0= +github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= +github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/containerd/containerd v1.7.29 h1:90fWABQsaN9mJhGkoVnuzEY+o1XDPbg9BTC9QTAHnuE= +github.com/containerd/containerd v1.7.29/go.mod h1:azUkWcOvHrWvaiUjSQH0fjzuHIwSPg1WL5PshGP4Szs= +github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= +github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= +github.com/containerd/platforms v0.2.1 
h1:zvwtM3rz2YHPQsF2CHYM8+KtB5dvhISiXh5ZpSBQv6A= +github.com/containerd/platforms v0.2.1/go.mod h1:XHCb+2/hzowdiut9rkudds9bE5yJ7npe7dG/wG+uFPw= +github.com/cpuguy83/dockercfg v0.3.1 h1:/FpZ+JaygUR/lZP2NlFI2DVfrOEMAIKP5wWEJdoYe9E= +github.com/cpuguy83/dockercfg v0.3.1/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= +github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= +github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= +github.com/docker/docker v28.0.0+incompatible h1:Olh0KS820sJ7nPsBKChVhk5pzqcwDR15fumfAd/p9hM= +github.com/docker/docker v28.0.0+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c= +github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc= +github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= +github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4= 
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= -github.com/gliderlabs/ssh v0.3.6 h1:ZzjlDa05TcFRICb3anf/dSPN3ewz1Zx6CMLPWgkm3b8= -github.com/gliderlabs/ssh v0.3.6/go.mod h1:zpHEXBstFnQYtGnB8k8kQLol82umzn/2/snG7alWVD8= +github.com/gliderlabs/ssh v0.3.8 h1:a4YXD1V7xMF9g5nTkdfnja3Sxy1PVDCj1Zg4Wb8vY6c= +github.com/gliderlabs/ssh v0.3.8/go.mod h1:xYoytBv1sV0aL3CavoDuJIQNURXkkfPA/wxQ1pL1fAU= +github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= +github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE= +github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78= github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= @@ -36,10 +75,10 @@ github.com/go-redis/redis/v8 v8.11.5/go.mod h1:gREzHqY1hg6oD9ngVRbLStwAWKhA0FEgq github.com/go-resty/resty/v2 v2.11.0 h1:i7jMfNOJYMp69lq7qozJP+bjgzfAzeOhuGlyDrqxT/8= github.com/go-resty/resty/v2 v2.11.0/go.mod h1:iiP/OpA0CkcL3IGt1O0+/SIItFUbkkyw5BGXiVdTu+A= github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= -github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY= 
-github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= -github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg= -github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo= +github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= @@ -48,44 +87,72 @@ github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:W github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= 
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= -github.com/google/uuid v1.2.0 h1:qJYtXnJRWmpe7m/3XlyhrsLrEURqHRM2kxzoxXqyUDs= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= -github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg= +github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/hashicorp/yamux v0.1.2 h1:XtB8kyFOyHXYVFnwT5C3+Bdo8gArse7j2AQ0DA0Uey8= +github.com/hashicorp/yamux v0.1.2/go.mod h1:C+zze2n6e/7wshOZep2A70/aQU6QBRWJO/G6FT1wIns= github.com/hibiken/asynq v0.24.1 h1:+5iIEAyA9K/lcSPvx3qoPtsKJeKI5u9aOIvUmSsazEw= github.com/hibiken/asynq v0.24.1/go.mod h1:u5qVeSbrnfT+vtG5Mq8ZPzQu/BmCKMHvTGb91uy9Tts= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= -github.com/klauspost/compress v1.16.0 h1:iULayQNOReoYUe+1qtKOqw9CwJv3aNQu8ivo7lw1HU4= -github.com/klauspost/compress v1.16.0/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= +github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= +github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= 
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= -github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/labstack/echo-contrib v0.15.0 h1:9K+oRU265y4Mu9zpRDv3X+DGTqUALY6oRHCSZZKCRVU= -github.com/labstack/echo-contrib v0.15.0/go.mod h1:lei+qt5CLB4oa7VHTE0yEfQSEB9XTJI1LUqko9UWvo4= -github.com/labstack/echo/v4 v4.11.4 h1:vDZmA+qNeh1pd/cCkEicDMrjtrnMGQ1QFI9gWN1zGq8= -github.com/labstack/echo/v4 v4.11.4/go.mod h1:noh7EvLwqDsmh/X/HWKPUl1AjzJrhyptRyEbQJfxen8= +github.com/labstack/echo-contrib v0.17.4 h1:g5mfsrJfJTKv+F5uNKCyrjLK7js+ZW6HTjg4FnDxxgk= +github.com/labstack/echo-contrib v0.17.4/go.mod h1:9O7ZPAHUeMGTOAfg80YqQduHzt0CzLak36PZRldYrZ0= +github.com/labstack/echo/v4 v4.15.0 h1:hoRTKWcnR5STXZFe9BmYun9AMTNeSbjHi2vtDuADJ24= +github.com/labstack/echo/v4 v4.15.0/go.mod h1:xmw1clThob0BSVRX1CRQkGQ/vjwcpOMjQZSZa9fKA/c= github.com/labstack/gommon v0.4.2 h1:F8qTUNXgG1+6WQmqoUWnz8WiEU60mXVVw0P4ht1WRA0= github.com/labstack/gommon v0.4.2/go.mod h1:QlUFxVM+SNXhDL/Z7YhocGIBYOiwB0mXm1+1bAPHPyU= github.com/leodido/go-urn v1.2.2 h1:7z68G0FCGvDk646jz1AelTYNYWrTNm0bEcFAo147wt4= github.com/leodido/go-urn v1.2.2/go.mod h1:kUaIbLZWttglzwNuG0pgsh5vuV6u2YcGBYz1hIPjtOQ= -github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= -github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= -github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= 
+github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= +github.com/lufia/plan9stats v0.0.0-20240408141607-282e7b5d6b74 h1:1KuuSOy4ZNgW0KA2oYIngXVFhQcXxhLqCVK7cBcldkk= +github.com/lufia/plan9stats v0.0.0-20240408141607-282e7b5d6b74/go.mod h1:ilwx/Dta8jXAgpFYFvSWEMwxmbWXyiUHkd5FwyKhb5k= +github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= +github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= +github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE= +github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= +github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= +github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk= +github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc= +github.com/moby/sys/sequential v0.5.0 h1:OPvI35Lzn9K04PBbCLW0g4LcFAJgHsvXsRyewg5lXtc= +github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWKGRYaaV5ZZlo= +github.com/moby/sys/user v0.3.0 h1:9ni5DlcW5an3SvRSx4MouotOygvzaXbaSrc/wGDFWPo= +github.com/moby/sys/user v0.3.0/go.mod h1:bG+tYYYJgaMtRKgEmuueC0hJEAZWwtIbZTB+85uoHjs= +github.com/moby/sys/userns v0.1.0 h1:tVLXkFOxVu9A64/yh59slHVv9ahO9UIev4JZusOLG/g= +github.com/moby/sys/userns v0.1.0/go.mod h1:IHUYgu/kao6N8YZlp9Cf444ySSvCmDlmzUcYfDHOl28= +github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= +github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= +github.com/morikuni/aec v1.0.0 
h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= +github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/multiformats/go-multistream v0.6.1 h1:4aoX5v6T+yWmc2raBHsTvzmFhOI8WVOer28DeBBEYdQ= +github.com/multiformats/go-multistream v0.6.1/go.mod h1:ksQf6kqHAb6zIsyw7Zm+gAuVo57Qbq84E27YlYqavqw= +github.com/multiformats/go-varint v0.0.6 h1:gk85QWKxh3TazbLxED/NlDVv8+q+ReFJk7Y2W/KhfNY= +github.com/multiformats/go-varint v0.0.6/go.mod h1:3Ls8CIEsrijN6+B7PbrXRPxHRPuXSrVKRY101jdMZYE= github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= @@ -93,31 +160,49 @@ github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+W github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= +github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= github.com/onsi/gomega v1.15.0/go.mod h1:cIuvLEne0aoVhAgh/O6ac0Op8WWw9H6eYCriF+tEHG0= github.com/onsi/gomega v1.18.1 h1:M1GfJqGRrBrrGGsbxzV5dqM2U2ApXefZCQpkukxYRLE= -github.com/pires/go-proxyproto v0.7.0 h1:IukmRewDQFWC7kfnb66CSomk2q/seBuilHBYFwyq0Hs= -github.com/pires/go-proxyproto v0.7.0/go.mod h1:Vz/1JPY/OACxWGQNIRY2BeyDmpoaWmEP40O9LbuiFR4= +github.com/onsi/gomega v1.18.1/go.mod h1:0q+aL8jAiMXy9hbwj2mr5GziHiwhAIQpFmmtT5hitRs= +github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= +github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= 
+github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug= +github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM= +github.com/pires/go-proxyproto v0.9.2 h1:H1UdHn695zUVVmB0lQ354lOWHOy6TZSpzBl3tgN0s1U= +github.com/pires/go-proxyproto v0.9.2/go.mod h1:ZKAAyp3cgy5Y5Mo4n9AlScrkCZwUy0g3Jf+slqQVcuU= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= +github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 h1:o4JXh1EVt9k/+g42oCprj/FisM4qX9L3sZB3upGN2ZU= +github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= github.com/redis/go-redis/v9 v9.0.3 h1:+7mmR26M0IvyLxGZUHxu4GiBkJkVDid0Un+j4ScYu4k= github.com/redis/go-redis/v9 v9.0.3/go.mod h1:WqMKv5vnQbRuZstUwxQI195wHy+t4PuXDOjzMvcuQHk= github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs= github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8= +github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= github.com/rwtodd/Go.Sed v0.0.0-20210816025313-55464686f9ef/go.mod h1:8AEUvGVi2uQ5b24BIhcr0GCcpd/RNAFWaN2CJFrWIIQ= github.com/sethvargo/go-envconfig v0.9.0 h1:Q6FQ6hVEeTECULvkJZakq3dZMeBQ3JUpcKMfPQbKMDE= github.com/sethvargo/go-envconfig v0.9.0/go.mod h1:Iz1Gy1Sf3T64TQlJSvee81qDhf7YIlt8GMUX6yyNFs0= 
-github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= -github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/shirou/gopsutil/v3 v3.24.3 h1:eoUGJSmdfLzJ3mxIhmOAhgKEKgQkeOwKpz1NbhVnuPE= +github.com/shirou/gopsutil/v3 v3.24.3/go.mod h1:JpND7O217xa72ewWz9zN2eIIkPWsDN/3pl0H8Qt0uwg= +github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM= +github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ= +github.com/shoenig/test v0.6.4/go.mod h1:byHiCGXqrVaflBLAMq/srcZIHynQPQgeyvkvXnjqq0k= +github.com/sirupsen/logrus v1.9.4 h1:TsZE7l11zFCLZnZ+teH4Umoq5BhEIfIzfRDZ1Uzql2w= +github.com/sirupsen/logrus v1.9.4/go.mod h1:ftWc9WdOfJ0a92nsE2jF5u5ZwH8Bv2zdeOC42RjbV2g= github.com/spf13/cast v1.3.1 h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng= github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= -github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= @@ -125,8 +210,20 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod 
h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/testcontainers/testcontainers-go v0.32.0 h1:ug1aK08L3gCHdhknlTTwWjPHPS+/alvLJU/DRxTD/ME= +github.com/testcontainers/testcontainers-go v0.32.0/go.mod h1:CRHrzHLQhlXUsa5gXjTOfqIEJcrK5+xMDmBr/WMI88E= +github.com/testcontainers/testcontainers-go/modules/redis v0.32.0 h1:HW5Qo9qfLi5iwfS7cbXwG6qe8ybXGePcgGPEmVlVDlo= +github.com/testcontainers/testcontainers-go/modules/redis v0.32.0/go.mod h1:5kltdxVKZG0aP1iegeqKz4K8HHyP0wbkW5o84qLyMjY= +github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI= +github.com/tklauser/go-sysconf v0.3.13 h1:GBUpcahXSpR2xN01jhkNAbTLRk2Yzgggk8IM08lq3r4= +github.com/tklauser/go-sysconf v0.3.13/go.mod h1:zwleP4Q4OehZHGn4CYZDipCgg9usW5IJePewFCGVEa0= +github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY= +github.com/tklauser/numcpus v0.7.0 h1:yjuerZP127QG9m5Zh/mSO4wqurYil27tHrqwRoRjpr4= +github.com/tklauser/numcpus v0.7.0/go.mod h1:bb6dMVcj8A42tSE7i32fsIUCbQNllK5iDguyOZRUzAY= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/valyala/fasttemplate v1.2.2 h1:lxLXG0uE3Qnshl9QyaK6XJxMXlQZELvChBOCmQD0Loo= @@ -141,15 +238,28 @@ github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV github.com/yuin/goldmark v1.2.1/go.mod 
h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -go.uber.org/goleak v1.1.12 h1:gZAh5/EyT/HQwlpkCy6wTpqfH9H8Lz8zbm3dZh+OyzA= +github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0= +github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= +go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= +go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.51.0 h1:Xs2Ncz0gNihqu9iosIZ5SkBbWo5T8JhhLJFMQL1qmLI= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.51.0/go.mod h1:vy+2G/6NvVMpwGX/NyLqcC41fxepnuKHk16E6IZUcJc= +go.opentelemetry.io/otel v1.37.0 h1:9zhNfelUvx0KBfu/gb+ZgeAfAgtWrfHJZcAqFC228wQ= +go.opentelemetry.io/otel v1.37.0/go.mod h1:ehE/umFRLnuLa/vSccNq9oS1ErUlkkK71gMcN34UG8I= +go.opentelemetry.io/otel/metric v1.37.0 h1:mvwbQS5m0tbmqML4NqK+e3aDiO02vsf/WgbsdpcPoZE= +go.opentelemetry.io/otel/metric v1.37.0/go.mod h1:04wGrZurHYKOc+RKeye86GwKiTb9FKm1WHtO+4EVr2E= +go.opentelemetry.io/otel/trace v1.37.0 h1:HLdcFNbRQBE2imdSEgm/kwqmQj1Or1l/7bW6mxVK7z4= +go.opentelemetry.io/otel/trace v1.37.0/go.mod h1:TlgrlQ+PtQO5XFerSPUYG0JSgGyryXewPGyayAWSBS0= go.uber.org/goleak v1.1.12/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= +go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= +go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod 
h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= -golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc= -golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= +golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8= +golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A= golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= @@ -168,46 +278,52 @@ golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= -golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo= -golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= +golang.org/x/net v0.49.0 h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o= +golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4= +golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys 
v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU= -golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ= +golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/term 
v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= -golang.org/x/term v0.16.0 h1:m+B6fahuftsE9qjo0VWp2FW0mB3MTJvR0BaMQrq0pmE= +golang.org/x/term v0.39.0 h1:RclSuaJf32jOqZz74CkPA9qFuVTX7vhLlpfj/IGWlqY= +golang.org/x/term v0.39.0/go.mod h1:yxzUCTP/U+FzoxfdKmLaA0RV1WgE0VY7hXBwKtY/4ww= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= @@ -215,12 +331,12 @@ golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= -golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= -golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= +golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= -golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= +golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI= +golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod 
h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= @@ -240,12 +356,13 @@ google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzi google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w= -google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= +google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= diff --git a/ssh/http/handlers.go b/ssh/http/handlers.go new file mode 100644 index 00000000000..bc5615c01a6 --- /dev/null +++ b/ssh/http/handlers.go @@ -0,0 +1,302 @@ +package http + +import ( + "io" + "net/http" + "net/url" + "strings" + "sync" + "time" + + "github.com/labstack/echo/v4" + "github.com/shellhub-io/shellhub/pkg/api/internalclient" + "github.com/shellhub-io/shellhub/pkg/wsconnadapter" + 
"github.com/shellhub-io/shellhub/ssh/pkg/dialer" + log "github.com/sirupsen/logrus" +) + +type Handlers struct { + Config *Config + Dialer *dialer.Dialer + Client internalclient.Client +} + +const ( + // HandleSSHClosePath receives a request to close an existing SSH session. + HandleSSHClosePath = "/api/sessions/:uid/close" + // HandleHTTPProxyPath proxies an inbound HTTP request to a device's HTTP server. + HandleHTTPProxyPath = "/http/proxy" + // HandleHealthcheckPath is used for readiness/liveness checks. + HandleHealthcheckPath = "/healthcheck" +) + +const ( + // HandleConnectionV1Path is the connection endpoint where agents using revdial connects to establish + // a WebSocket connection. Each new logical session requires an extra reverse dial handshake. + HandleConnectionV1Path = "/ssh/connection" + // HandleConnectionV2Path is the connection endpoint where agents using yamux/multistream connects to + // establish a WebSocket connection. Subsequent logical streams are opened without additional HTTP + // handshakes and are protocol-negotiated via multistream-select. + HandleConnectionV2Path = "/agent/connection" +) + +const ( + // HandleRevdialPath is the reverse dial endpoint where agents using revdial requests a new logical + // session. + HandleRevdialPath = "/ssh/revdial" +) + +// HandleSSHClose receives a notification from the agent that an SSH +// session should be closed. It dials the device (choosing the correct +// transport version) and then performs the version-specific close +// sequence: HTTP GET for V1 or multistream + JSON payload for V2. 
+func (h *Handlers) HandleSSHClose(c echo.Context) error { + var data struct { + UID string `param:"uid"` + Device string `json:"device"` + } + + if err := c.Bind(&data); err != nil { + return err + } + + ctx := c.Request().Context() + + tenant := c.Request().Header.Get("X-Tenant-ID") + + if _, err := h.Dialer.DialTo(ctx, tenant, data.Device, dialer.SSHCloseTarget{SessionID: data.UID}); err != nil { + log.WithError(err).Error("failed to send ssh close message") + + return ErrDeviceTunnelDial + } + + return c.NoContent(http.StatusOK) +} + +// HandleHTTPProxy proxies an inbound HTTP request to a device's HTTP +// service exposed through the reverse tunnel/web endpoint feature. It +// supports both transport versions: +// - V1: issues a CONNECT prelude then performs a standard HTTP request over the established raw tunnel. +// - V2: negotiates the /http/proxy multistream protocol and exchanges a JSON envelope to set up the target host/port. +// +// The handler then hijacks the Echo response writer to stream data +// bidirectionally between client and device. 
+func (h *Handlers) HandleHTTPProxy(c echo.Context) error { + requestID := c.Request().Header.Get("X-Request-ID") + + address := c.Request().Header.Get("X-Address") + log.WithFields(log.Fields{ + "request-id": requestID, + "address": address, + }).Debug("address value") + + path := c.Request().Header.Get("X-Path") + log.WithFields(log.Fields{ + "request-id": requestID, + "address": address, + }).Debug("path") + + endpoint, err := h.Client.LookupWebEndpoints(c.Request().Context(), address) + if err != nil { + log.WithError(err).Error("failed to get the web endpoint") + + return c.JSON(http.StatusForbidden, NewMessageFromError(ErrWebEndpointForbidden)) + } + + logger := log.WithFields(log.Fields{ + "request-id": requestID, + "namespace": endpoint.Namespace, + "device": endpoint.DeviceUID, + }) + + conn, err := h.Dialer.DialTo(c.Request().Context(), endpoint.Namespace, endpoint.DeviceUID, dialer.HTTPProxyTarget{ + RequestID: requestID, + Host: endpoint.Host, + Port: endpoint.Port, + }) + if err != nil { + logger.WithError(err).Error("failed to dial to device") + + return c.JSON(http.StatusForbidden, NewMessageFromError(ErrDeviceTunnelDial)) + } + defer conn.Close() + + logger.Trace("new web endpoint connection initialized") + defer logger.Trace("web endpoint connection doned") + + req := c.Request() + req.Host = strings.Join([]string{address, h.Config.WebEndpointsDomain}, ".") + req.URL, err = url.Parse(path) + if err != nil { + logger.WithError(err).Error("failed to parse the path") + + return c.JSON(http.StatusInternalServerError, NewMessageFromError(ErrDeviceTunnelReadResponse)) + } + + if err := req.Write(conn); err != nil { + logger.WithError(err).Error("failed to write the request to the agent") + + return c.JSON(http.StatusInternalServerError, NewMessageFromError(ErrDeviceTunnelWriteRequest)) + } + + log.WithFields(log.Fields{ + "request-id": requestID, + "method": req.Method, + "url": req.URL.String(), + "host": req.Host, + "headers": req.Header, + 
}).Debug("request to device") + + ctr := http.NewResponseController(c.Response()) + out, _, err := ctr.Hijack() + if err != nil { + logger.WithError(err).Error("failed to hijack the http request") + + return c.JSON(http.StatusInternalServerError, NewMessageFromError(ErrDeviceTunnelHijackRequest)) + } + + defer out.Close() + + // Bidirectional copy between the client and the device. + var wg sync.WaitGroup + wg.Add(2) + + starTime := time.Now() + + go func() { + defer wg.Done() + + if _, err := io.Copy(conn, out); err != nil { + logger.WithError(err).Debug("in and out done returned a error") + } + + logger.Trace("in and out done") + }() + + go func() { + defer wg.Done() + + if _, err := io.Copy(out, conn); err != nil { + logger.WithError(err).Debug("out and in done returned a error") + } + + logger.Trace("out and in done") + }() + + wg.Wait() + + logger.WithFields(log.Fields{ + "duration": time.Since(starTime).String(), + }).Info("web endpoint request completed") + + return nil +} + +// HandleHealthcheck returns a simple 200 OK used for readiness/liveness +// checks. +func (h *Handlers) HandleHealthcheck(c echo.Context) error { + return c.String(http.StatusOK, "OK") +} + +// HandleConnectionV1 upgrades the HTTP connection to WebSocket and +// registers a legacy (V1) reverse dialer for the agent. Each new logical +// session requires an extra reverse dial handshake. +func (h *Handlers) HandleConnectionV1(c echo.Context) error { + conn, err := upgrader.Upgrade(c.Response(), c.Request(), nil) + if err != nil { + return c.String(http.StatusInternalServerError, err.Error()) + } + + requestID := c.Request().Header.Get("X-Request-ID") + + tenant := c.Request().Header.Get("X-Tenant-ID") + uid := c.Request().Header.Get("X-Device-UID") + + // WARN: In versions before 0.15, the agent's authentication may not provide the "X-Tenant-ID" header. + // This can cause issues with establishing sessions and tracking online devices. 
To solve this, + // we retrieve the tenant ID by querying the API. Maybe this can be removed in a future release. + if tenant == "" { + device, err := h.Client.GetDevice(c.Request().Context(), uid) + if err != nil { + log.WithError(err). + WithField("uid", uid). + Error("unable to retrieve device's tenant id") + + return err + } + + tenant = device.TenantID + } + + h.Dialer.Manager.Set( + dialer.NewKey(tenant, uid), + wsconnadapter.New( + conn, + wsconnadapter.WithID(requestID), + wsconnadapter.WithDevice(tenant, uid), + ), + HandleRevdialPath, + ) + + return nil +} + +type HandleConnectionV2Data struct { + RequestID string `header:"x-request-id" validate:"required"` + UID string `header:"x-device-uid" validate:"required,len=64"` + Tenant string `header:"x-tenant-id" validate:"required,uuid"` +} + +// HandleConnectionV2 upgrades the HTTP connection to WebSocket and +// binds it to a yamux session (V2). Subsequent logical streams are +// opened without additional HTTP handshakes and are protocol-negotiated +// via multistream-select. 
+func (h *Handlers) HandleConnectionV2(c echo.Context) error { + log.Trace("handling v2 connection") + defer log.Trace("v2 connection handle closed") + + conn, err := upgrader.Upgrade(c.Response(), c.Request(), nil) + if err != nil { + return c.String(http.StatusInternalServerError, err.Error()) + } + + var data HandleConnectionV2Data + + if err := c.Bind(&data); err != nil { + log.WithError(err).Error("failed to bind the request") + + return err + } + + if err := c.Validate(&data); err != nil { + log.WithError(err).Error("failed to validate the request") + + return err + } + + logger := log.WithFields(log.Fields{ + "request-id": data.RequestID, + "tenant": data.Tenant, + "uid": data.UID, + }) + + logger.Info("v2 connection established") + + if err := h.Dialer.Manager.Bind( + data.Tenant, + data.UID, + wsconnadapter.New( + conn, + wsconnadapter.WithID(data.RequestID), + wsconnadapter.WithDevice(data.Tenant, data.UID), + ), + ); err != nil { + logger.WithError(err).Error("failed to bind the connection") + + return err + } + + logger.Info("v2 connection bound") + + return nil +} diff --git a/ssh/http/server.go b/ssh/http/server.go new file mode 100644 index 00000000000..07870ad40db --- /dev/null +++ b/ssh/http/server.go @@ -0,0 +1,150 @@ +package http + +import ( + "errors" + "net/http" + + "github.com/gorilla/websocket" + "github.com/labstack/echo/v4" + "github.com/shellhub-io/shellhub/pkg/api/internalclient" + "github.com/shellhub-io/shellhub/pkg/revdial" + "github.com/shellhub-io/shellhub/pkg/validator" + "github.com/shellhub-io/shellhub/ssh/pkg/dialer" +) + +type Message struct { + Message string `json:"message"` +} + +func NewMessageFromError(err error) Message { + return Message{ + Message: err.Error(), + } +} + +// Config controls optional features for the SSH HTTP sidecar server. 
+// +// When WebEndpoints is enabled the server exposes an HTTP proxy entry +// point (/http/proxy) that allows externally accessible per-device +// subdomains to be resolved and forwarded through the reverse tunnel +// transport (supporting both legacy V1 and yamux/multistream V2). +type Config struct { + // WebEndpoints enables the web endpoints (HTTP proxy) feature. + WebEndpoints bool + // WebEndpointsDomain is the base domain used when constructing the + // host header for tunneled HTTP requests (e.g.
.). + WebEndpointsDomain string +} + +// Server wires HTTP routes (connection upgrade, reverse dialing, +// web endpoint proxy, healthcheck) to the underlying dialer and +// handlers. It exposes both V1 (/ssh/connection + /ssh/revdial) and V2 +// (/connection) endpoints during the transition period while agents +// upgrade. +type Server struct { + Config *Config + Router *echo.Echo + Handlers *Handlers +} + +var ( + ErrWebEndpointForbidden = errors.New("web endpoint not found") + ErrDeviceTunnelDial = errors.New("failed to connect to device") + ErrDeviceTunnelWriteRequest = errors.New("failed to send data to the device") + ErrDeviceTunnelReadResponse = errors.New("failed to write the response back to the client") + ErrDeviceTunnelHijackRequest = errors.New("failed to capture the request") + ErrDeviceTunnelParsePath = errors.New("failed to parse the path") + ErrDeviceTunnelConnect = errors.New("failed to connect to the port on device") +) + +var upgrader = websocket.Upgrader{ + ReadBufferSize: 1024, + WriteBufferSize: 1024, + Subprotocols: []string{"binary"}, + CheckOrigin: func(_ *http.Request) bool { + return true + }, +} + +// ListenAndServe starts the Echo HTTP server on the provided address. +func (s *Server) ListenAndServe(address string) error { + return s.Router.Start(address) +} + +type Binder struct{} + +func NewBinder() *Binder { + return &Binder{} +} + +func (b *Binder) Bind(s any, c echo.Context) error { + binder := new(echo.DefaultBinder) + if err := binder.Bind(s, c); err != nil { + err := err.(*echo.HTTPError) //nolint:forcetypeassert + + return err + } + + if err := binder.BindHeaders(c, s); err != nil { + err := err.(*echo.HTTPError) //nolint:forcetypeassert + + return err + } + + return nil +} + +type Validator struct { + validator *validator.Validator +} + +// NewValidator creates a new validator for the echo framework from the ShellHub validator. 
+func NewValidator() *Validator { + return &Validator{validator: validator.New()} +} + +// Validate is called by the echo framework to validate the request body. +// If the request body is invalid, it returns an error with the invalid fields. +func (v *Validator) Validate(structure any) error { + if ok, err := v.validator.Struct(structure); !ok || err != nil { + return err + } + + return nil +} + +func NewServer(d *dialer.Dialer, cli internalclient.Client, cfg *Config) *Server { + r := echo.New() + + r.Binder = NewBinder() + r.Validator = NewValidator() + r.HideBanner = true + r.HidePort = true + + handlers := &Handlers{ + Dialer: d, + Client: cli, + Config: cfg, + } + + r.GET(HandleConnectionV1Path, handlers.HandleConnectionV1) + r.GET(HandleConnectionV2Path, handlers.HandleConnectionV2) + + r.GET(HandleRevdialPath, echo.WrapHandler(revdial.ConnHandler(upgrader))) + + r.POST(HandleSSHClosePath, handlers.HandleSSHClose) + r.GET(HandleHealthcheckPath, handlers.HandleHealthcheck) + + if cfg.WebEndpoints { + // NOTE: The `/http/proxy` endpoint is invoked by the NGINX gateway when a tunnel URL is accessed. It processes + // the `X-Address` and `X-Path` headers, which specify the tunnel's address and the target path on the server, + // returning an error related to the connection to device or what was returned from the server inside the tunnel. 
+ r.Any(HandleHTTPProxyPath, handlers.HandleHTTPProxy) + } + + return &Server{ + Config: cfg, + Router: r, + Handlers: handlers, + } +} diff --git a/ssh/main.go b/ssh/main.go index 3d3047ac608..553e5495179 100644 --- a/ssh/main.go +++ b/ssh/main.go @@ -1,137 +1,69 @@ package main import ( - "context" - "errors" "fmt" - "io" - "net/http" "runtime" + "time" "github.com/labstack/echo-contrib/pprof" - "github.com/labstack/echo/v4" "github.com/shellhub-io/shellhub/pkg/api/internalclient" + "github.com/shellhub-io/shellhub/pkg/cache" "github.com/shellhub-io/shellhub/pkg/envs" "github.com/shellhub-io/shellhub/pkg/loglevel" - sshTunnel "github.com/shellhub-io/shellhub/ssh/pkg/tunnel" - "github.com/shellhub-io/shellhub/ssh/server" - "github.com/shellhub-io/shellhub/ssh/server/handler" + "github.com/shellhub-io/shellhub/ssh/http" + "github.com/shellhub-io/shellhub/ssh/pkg/dialer" + ssh "github.com/shellhub-io/shellhub/ssh/server" "github.com/shellhub-io/shellhub/ssh/web" - "github.com/shellhub-io/shellhub/ssh/web/pkg/cache" log "github.com/sirupsen/logrus" ) +const ListenAddress = ":8080" + func init() { loglevel.SetLogLevel() log.SetFormatter(&log.JSONFormatter{}) } +type Envs struct { + ConnectTimeout time.Duration `env:"CONNECT_TIMEOUT,default=30s"` + RedisURI string `env:"REDIS_URI,default=redis://redis:6379"` + // Allows SSH to connect with an agent via a public key when the agent version is less than 0.6.0. + // Agents 0.5.x or earlier do not validate the public key request and may panic. + // Please refer to: https://github.com/shellhub-io/shellhub/issues/3453 + AllowPublickeyAccessBelow060 bool `env:"ALLOW_PUBLIC_KEY_ACCESS_BELLOW_0_6_0,default=false"` + WebEndpoints bool `env:"SHELLHUB_WEB_ENDPOINTS,default=false"` + WebEndpointsDomain string `env:"SHELLHUB_WEB_ENDPOINTS_DOMAIN"` +} + func main() { // Populates configuration based on environment variables prefixed with 'SSH_'. 
- env, err := envs.ParseWithPrefix[server.Options]("SSH_") + env, err := envs.ParseWithPrefix[Envs]("SSH_") if err != nil { log.WithError(err).Fatal("Failed to load environment variables") } - // NOTICE: This redis is used by the web terminal to store its session tokens. - if err := cache.ConnectRedis(env.RedisURI); err != nil { - log.WithError(err).Fatal("Failed to connect to redis") + cache, err := cache.NewRedisCache(env.RedisURI, 0) + if err != nil { + log.WithError(err). + Fatal("failed to connect to redis cache") } - tunnel := sshTunnel.NewTunnel("/ssh/connection", "/ssh/revdial") - - tunnel.API = internalclient.NewClientWithAsynq(env.RedisURI) - if tunnel.API == nil { - log.Fatal("failed to create internal client") + cli, err := internalclient.NewClient(nil, internalclient.WithAsynqWorker(env.RedisURI)) + if err != nil { + log.WithError(err). + Fatal("failed to create the tunnel") } - router := tunnel.GetRouter() - router.POST("/sessions/:uid/close", func(c echo.Context) error { - exit := func(status int, err error) error { - log.WithError(err).WithField("status", status).Error("failed to close the session") - - return c.JSON(status, err.Error()) - } - - uid := c.Param("uid") - var closeRequest struct { - Device string `json:"device"` - } - if err := c.Bind(&closeRequest); err != nil { - return exit(http.StatusBadRequest, err) - } - - conn, err := tunnel.Dial(context.Background(), closeRequest.Device) - if err != nil { - return exit(http.StatusInternalServerError, err) - } - - req, err := http.NewRequest(http.MethodGet, fmt.Sprintf("/ssh/close/%s", uid), nil) - if err != nil { - return exit(http.StatusInternalServerError, err) - } - - if err := req.Write(conn); err != nil { - return exit(http.StatusInternalServerError, err) - } - - return c.NoContent(http.StatusOK) - }) + d := dialer.NewDialer(cli) - router.Any("/ssh/http", func(c echo.Context) error { - replyError := func(err error, msg string, code int) error { - log.WithError(err).WithFields(log.Fields{ - 
"remote": c.Request().RemoteAddr, - "address": c.Request().Header.Get("X-Public-Address"), - "path": c.Request().Header.Get("X-Path"), - }).Error(msg) - - return c.String(code, msg) - } - - dev, err := tunnel.API.GetDeviceByPublicURLAddress(c.Request().Header.Get("X-Public-URL-Address")) - if err != nil { - return replyError(err, "failed to get device data", http.StatusInternalServerError) - } - - if !dev.PublicURL { - return replyError(err, "this device is not accessible via public URL", http.StatusForbidden) - } - - in, err := tunnel.Dial(c.Request().Context(), dev.UID) - if err != nil { - return replyError(err, "failed to connect to device", http.StatusInternalServerError) - } - - defer in.Close() - - if err := c.Request().Write(in); err != nil { - return replyError(err, "failed to write request to device", http.StatusInternalServerError) - } - - ctr := http.NewResponseController(c.Response()) - out, _, err := ctr.Hijack() - if err != nil { - return replyError(err, "failed to hijack response", http.StatusInternalServerError) - } - - defer out.Close() - if _, err := io.Copy(out, in); errors.Is(err, io.ErrUnexpectedEOF) { - return replyError(err, "failed to copy response from device service to client", http.StatusInternalServerError) - } - - return nil + h := http.NewServer(d, cli, &http.Config{ + WebEndpoints: env.WebEndpoints, + WebEndpointsDomain: env.WebEndpointsDomain, }) - // TODO: add `/ws/ssh` route to OpenAPI repository. 
-	router.GET("/ws/ssh", echo.WrapHandler(web.HandlerRestoreSession(web.RestoreSession, handler.WebSession)))
-	router.POST("/ws/ssh", echo.WrapHandler(http.HandlerFunc(func(res http.ResponseWriter, req *http.Request) {
-		web.HandlerCreateSession(web.CreateSession)(res, req)
-	})))
+	router := h.Router
 
-	router.GET("/healthcheck", func(c echo.Context) error {
-		return c.String(http.StatusOK, "OK")
-	})
+	web.NewSSHServerBridge(router, cache)
 
 	if envs.IsDevelopment() {
 		runtime.SetBlockProfileRate(1)
@@ -140,7 +72,40 @@ func main() {
 		log.Info("Profiling enabled at http://0.0.0.0:8080/debug/pprof/")
 	}
 
-	go http.ListenAndServe(":8080", router) // nolint:errcheck
+	s := ssh.NewServer(d, cache, &ssh.Options{
+		ConnectTimeout: env.ConnectTimeout,
+		AllowPublickeyAccessBelow060: env.AllowPublickeyAccessBelow060,
+	})
+
+	errs := make(chan error)
+
+	go func() {
+		defer func() {
+			if r := recover(); r != nil {
+				log.Debugf("listen for HTTP server on %s panicked", ListenAddress)
+
+				errs <- fmt.Errorf("listen for HTTP on %s panicked", ListenAddress)
+			}
+		}()
+
+		errs <- h.ListenAndServe(ListenAddress)
+	}()
+
+	go func() {
+		defer func() {
+			if r := recover(); r != nil {
+				log.Debugf("listen for SSH server panicked")
+
+				errs <- fmt.Errorf("listen for SSH server panicked")
+			}
+		}()
+
+		errs <- s.ListenAndServe()
+	}()
+
+	if err := <-errs; err != nil {
+		log.WithError(err).Fatal("a fatal error was sent from HTTP or SSH server")
+	}
-	log.Fatal(server.NewServer(env, tunnel.Tunnel).ListenAndServe())
+	log.Warn("ssh service is closed")
 }
diff --git a/ssh/mocks/session.go b/ssh/mocks/session.go
deleted file mode 100644
index ce83e2a607d..00000000000
--- a/ssh/mocks/session.go
+++ /dev/null
@@ -1,357 +0,0 @@
-// Code generated by mockery v2.20.0. DO NOT EDIT.
- -package mocks - -import ( - io "io" - net "net" - - mock "github.com/stretchr/testify/mock" - - ssh "github.com/gliderlabs/ssh" -) - -// Session is an autogenerated mock type for the Session type -type Session struct { - mock.Mock -} - -// Break provides a mock function with given fields: c -func (_m *Session) Break(c chan<- bool) { - _m.Called(c) -} - -// Close provides a mock function with given fields: -func (_m *Session) Close() error { - ret := _m.Called() - - var r0 error - if rf, ok := ret.Get(0).(func() error); ok { - r0 = rf() - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// CloseWrite provides a mock function with given fields: -func (_m *Session) CloseWrite() error { - ret := _m.Called() - - var r0 error - if rf, ok := ret.Get(0).(func() error); ok { - r0 = rf() - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// Command provides a mock function with given fields: -func (_m *Session) Command() []string { - ret := _m.Called() - - var r0 []string - if rf, ok := ret.Get(0).(func() []string); ok { - r0 = rf() - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]string) - } - } - - return r0 -} - -// Context provides a mock function with given fields: -func (_m *Session) Context() ssh.Context { - ret := _m.Called() - - var r0 ssh.Context - if rf, ok := ret.Get(0).(func() ssh.Context); ok { - r0 = rf() - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(ssh.Context) - } - } - - return r0 -} - -// Environ provides a mock function with given fields: -func (_m *Session) Environ() []string { - ret := _m.Called() - - var r0 []string - if rf, ok := ret.Get(0).(func() []string); ok { - r0 = rf() - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]string) - } - } - - return r0 -} - -// Exit provides a mock function with given fields: code -func (_m *Session) Exit(code int) error { - ret := _m.Called(code) - - var r0 error - if rf, ok := ret.Get(0).(func(int) error); ok { - r0 = rf(code) - } else { - r0 = ret.Error(0) - } - - return r0 -} 
- -// LocalAddr provides a mock function with given fields: -func (_m *Session) LocalAddr() net.Addr { - ret := _m.Called() - - var r0 net.Addr - if rf, ok := ret.Get(0).(func() net.Addr); ok { - r0 = rf() - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(net.Addr) - } - } - - return r0 -} - -// Permissions provides a mock function with given fields: -func (_m *Session) Permissions() ssh.Permissions { - ret := _m.Called() - - var r0 ssh.Permissions - if rf, ok := ret.Get(0).(func() ssh.Permissions); ok { - r0 = rf() - } else { - r0 = ret.Get(0).(ssh.Permissions) - } - - return r0 -} - -// Pty provides a mock function with given fields: -func (_m *Session) Pty() (ssh.Pty, <-chan ssh.Window, bool) { - ret := _m.Called() - - var r0 ssh.Pty - var r1 <-chan ssh.Window - var r2 bool - if rf, ok := ret.Get(0).(func() (ssh.Pty, <-chan ssh.Window, bool)); ok { - return rf() - } - if rf, ok := ret.Get(0).(func() ssh.Pty); ok { - r0 = rf() - } else { - r0 = ret.Get(0).(ssh.Pty) - } - - if rf, ok := ret.Get(1).(func() <-chan ssh.Window); ok { - r1 = rf() - } else { - if ret.Get(1) != nil { - r1 = ret.Get(1).(<-chan ssh.Window) - } - } - - if rf, ok := ret.Get(2).(func() bool); ok { - r2 = rf() - } else { - r2 = ret.Get(2).(bool) - } - - return r0, r1, r2 -} - -// PublicKey provides a mock function with given fields: -func (_m *Session) PublicKey() ssh.PublicKey { - ret := _m.Called() - - var r0 ssh.PublicKey - if rf, ok := ret.Get(0).(func() ssh.PublicKey); ok { - r0 = rf() - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(ssh.PublicKey) - } - } - - return r0 -} - -// RawCommand provides a mock function with given fields: -func (_m *Session) RawCommand() string { - ret := _m.Called() - - var r0 string - if rf, ok := ret.Get(0).(func() string); ok { - r0 = rf() - } else { - r0 = ret.Get(0).(string) - } - - return r0 -} - -// Read provides a mock function with given fields: data -func (_m *Session) Read(data []byte) (int, error) { - ret := _m.Called(data) - - var r0 
int - var r1 error - if rf, ok := ret.Get(0).(func([]byte) (int, error)); ok { - return rf(data) - } - if rf, ok := ret.Get(0).(func([]byte) int); ok { - r0 = rf(data) - } else { - r0 = ret.Get(0).(int) - } - - if rf, ok := ret.Get(1).(func([]byte) error); ok { - r1 = rf(data) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// RemoteAddr provides a mock function with given fields: -func (_m *Session) RemoteAddr() net.Addr { - ret := _m.Called() - - var r0 net.Addr - if rf, ok := ret.Get(0).(func() net.Addr); ok { - r0 = rf() - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(net.Addr) - } - } - - return r0 -} - -// SendRequest provides a mock function with given fields: name, wantReply, payload -func (_m *Session) SendRequest(name string, wantReply bool, payload []byte) (bool, error) { - ret := _m.Called(name, wantReply, payload) - - var r0 bool - var r1 error - if rf, ok := ret.Get(0).(func(string, bool, []byte) (bool, error)); ok { - return rf(name, wantReply, payload) - } - if rf, ok := ret.Get(0).(func(string, bool, []byte) bool); ok { - r0 = rf(name, wantReply, payload) - } else { - r0 = ret.Get(0).(bool) - } - - if rf, ok := ret.Get(1).(func(string, bool, []byte) error); ok { - r1 = rf(name, wantReply, payload) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Signals provides a mock function with given fields: c -func (_m *Session) Signals(c chan<- ssh.Signal) { - _m.Called(c) -} - -// Stderr provides a mock function with given fields: -func (_m *Session) Stderr() io.ReadWriter { - ret := _m.Called() - - var r0 io.ReadWriter - if rf, ok := ret.Get(0).(func() io.ReadWriter); ok { - r0 = rf() - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(io.ReadWriter) - } - } - - return r0 -} - -// Subsystem provides a mock function with given fields: -func (_m *Session) Subsystem() string { - ret := _m.Called() - - var r0 string - if rf, ok := ret.Get(0).(func() string); ok { - r0 = rf() - } else { - r0 = ret.Get(0).(string) - } - - return 
r0 -} - -// User provides a mock function with given fields: -func (_m *Session) User() string { - ret := _m.Called() - - var r0 string - if rf, ok := ret.Get(0).(func() string); ok { - r0 = rf() - } else { - r0 = ret.Get(0).(string) - } - - return r0 -} - -// Write provides a mock function with given fields: data -func (_m *Session) Write(data []byte) (int, error) { - ret := _m.Called(data) - - var r0 int - var r1 error - if rf, ok := ret.Get(0).(func([]byte) (int, error)); ok { - return rf(data) - } - if rf, ok := ret.Get(0).(func([]byte) int); ok { - r0 = rf(data) - } else { - r0 = ret.Get(0).(int) - } - - if rf, ok := ret.Get(1).(func([]byte) error); ok { - r1 = rf(data) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -type mockConstructorTestingTNewSession interface { - mock.TestingT - Cleanup(func()) -} - -// NewSession creates a new instance of Session. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -func NewSession(t mockConstructorTestingTNewSession) *Session { - mock := &Session{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/ssh/pkg/dialer/dialer.go b/ssh/pkg/dialer/dialer.go new file mode 100644 index 00000000000..104f7338312 --- /dev/null +++ b/ssh/pkg/dialer/dialer.go @@ -0,0 +1,97 @@ +package dialer + +import ( + "context" + "errors" + "net" + "strings" + + "github.com/shellhub-io/shellhub/pkg/api/internalclient" + log "github.com/sirupsen/logrus" +) + +// NewKey joins tenant and device UID in the canonical form used as the +// identifier inside the connection manager maps. 
+func NewKey(tenant, uid string) string { + return strings.Join([]string{tenant, uid}, ":") +} + +type Dialer struct { + Manager *Manager + client internalclient.Client +} + +func NewDialer(client internalclient.Client) *Dialer { + m := NewManager() + + m.DialerDoneCallback = func(key string) { + // TODO: Use `Key` struct when available to avoid string parsing on every call. + parts := strings.Split(key, ":") + if len(parts) != 2 { + log.Error("failed to parse key at close handler") + + return + } + + tenant := parts[0] + uid := parts[1] + + if err := client.DevicesOffline(context.TODO(), uid); err != nil { + log.WithError(err). + WithFields(log.Fields{ + "uid": uid, + "tenant_id": tenant, + }). + Error("failed to set device offline") + } + } + + m.DialerKeepAliveCallback = func(key string) { + // TODO: Use `Key` struct when available to avoid string parsing on every call. + parts := strings.Split(key, ":") + if len(parts) != 2 { + log.Error("failed to parse key at keep alive handler") + + return + } + + tenant := parts[0] + uid := parts[1] + + if err := client.DevicesHeartbeat(context.TODO(), uid); err != nil { + log.WithError(err). + WithFields(log.Fields{ + "uid": uid, + "tenant_id": tenant, + }). + Error("failed to send heartbeat signal") + } + } + + return &Dialer{ + Manager: m, + client: client, + } +} + +var ErrInvalidArgument = errors.New("invalid argument") + +// DialTo establishes a raw reverse connection to the device and performs +// the version-specific bootstrap for the provided target. It returns a +// connection ready for application protocol usage. 
+func (t *Dialer) DialTo(ctx context.Context, tenant string, uid string, target Target) (net.Conn, error) { + if tenant == "" || uid == "" { + return nil, ErrInvalidArgument + } + + conn, version, err := t.Manager.Dial(ctx, NewKey(tenant, uid)) + if err != nil { + return nil, err + } + + if target == nil { + return conn, nil + } + + return target.prepare(conn, version) +} diff --git a/ssh/pkg/dialer/docs.go b/ssh/pkg/dialer/docs.go new file mode 100644 index 00000000000..21fb7f48247 --- /dev/null +++ b/ssh/pkg/dialer/docs.go @@ -0,0 +1,50 @@ +// Package dialer provides utilities to manage and use reverse connections +// opened by agents so the server (or other services) can dial back into a +// device. The package supports two transport modes (protocol versions): the +// legacy revdial-based HTTP transport (v1) and a yamux-multiplexed transport +// (v2). When using v2, per-stream application protocols are negotiated using +// multistream identifiers defined in this package. +// +// # High level concepts +// +// - Manager: a connection manager that stores active reverse transports and +// exposes methods to bind new agent connections and to dial a device by +// its key. It also exposes callbacks for tracking when connections are +// closed or when keep-alive events occur. +// +// - Dialer: a thin wrapper around a Manager which also holds an +// internalclient to perform device lifecycle operations (heartbeat / +// offline notifications) and provides DialTo which returns a ready-to-use +// net.Conn for a requested Target. +// +// - Target: an interface implemented by small helpers that prepare a raw +// connection for a particular application-level purpose (for example, +// opening or closing an SSH session, or establishing an HTTP proxy). The +// prepare method will perform any necessary handshake depending on the +// negotiated transport version. 
+//
+// # Versioning
+//
+// TransportVersion1 (v1) uses the older revdial/http handshake where the
+// client expects HTTP-style GET/CONNECT requests. TransportVersion2 (v2)
+// uses a yamux session and performs per-stream negotiation with the
+// multistream protocol strings (see ProtoSSHOpen, ProtoSSHClose,
+// ProtoHTTPProxy). Callers should prepare the appropriate Target and the
+// dialer will perform the correct handshake based on the returned
+// TransportVersion.
+//
+// # Usage (server-side)
+//
+// Typical server usage is:
+// - When an agent connects, call Manager.Bind(tenant, uid, conn) to
+// register the reverse transport. The manager will keep the session alive
+// and call configured callbacks on events.
+// - To connect to a device, create a Dialer (NewDialer) and call
+// Dialer.DialTo(ctx, tenant, uid, target). DialTo returns a net.Conn
+// already prepared for the requested target (or a raw connection if the
+// target is nil).
+//
+// The package intentionally keeps the wire-level protocol identifiers and
+// version handling colocated with the dial logic so the agent and server
+// implementations can remain compatible and easy to reason about.
+package dialer diff --git a/ssh/pkg/dialer/manager.go b/ssh/pkg/dialer/manager.go new file mode 100644 index 00000000000..a41711abcca --- /dev/null +++ b/ssh/pkg/dialer/manager.go @@ -0,0 +1,204 @@ +package dialer + +import ( + "context" + "errors" + "net" + "os" + "time" + + "github.com/hashicorp/yamux" + "github.com/shellhub-io/shellhub/pkg/revdial" + "github.com/shellhub-io/shellhub/pkg/wsconnadapter" + log "github.com/sirupsen/logrus" +) + +var ErrNoConnection = errors.New("no connection") + +type Manager struct { + Connections *SyncSliceMap + DialerDoneCallback func(string) + DialerKeepAliveCallback func(string) +} + +func NewManager() *Manager { + return &Manager{ + Connections: &SyncSliceMap{}, + DialerDoneCallback: func(string) {}, + DialerKeepAliveCallback: func(string) {}, + } +} + +func (m *Manager) Set(key string, conn *wsconnadapter.Adapter, connPath string) { + dialer := revdial.NewDialer(conn.Logger, conn, connPath) + + m.Connections.Store(key, dialer) + + if size := m.Connections.Size(key); size > 1 { + log.WithFields(log.Fields{ + "key": key, + "size": size, + }).Warning("Multiple connections stored for the same identifier.") + } + + m.DialerKeepAliveCallback(key) + + // Start the ping loop and get the channel for pong responses + pong := conn.Ping() + + go func() { + for { + select { + case <-pong: + m.DialerKeepAliveCallback(key) + + continue + case <-dialer.Done(): + m.Connections.Delete(key, dialer) + m.DialerDoneCallback(key) + + return + } + } + }() +} + +// BindPingInterval is the interval between pings sent to the yamux session +// to keep it alive. It should be less than the NAT timeout to avoid +// disconnections. +// It should be the same value as used by the revdial.Dialer ping interval. +const BindPingInterval = 35 * time.Second + +// Bind binds a WebSocket connection to a yamux session and stores it in the connection manager. +// All new agents should use this handler to register their reverse connection. 
+func (m *Manager) Bind(tenant string, uid string, conn *wsconnadapter.Adapter) error { + key := NewKey(tenant, uid) + + session, err := yamux.Client(conn, &yamux.Config{ + AcceptBacklog: 256, + // NOTE: As we need to keep the registered connection alive, we use our own ping/pong mechanism. + EnableKeepAlive: false, + // NOTE: Although we disable the built-in keepalive, we still need to set the interval to a non-zero value to + // avoid yamux error when verifying the configuration. We've created a Pull Request to improve this behavior. + // TODO: Remove this workaround when yamux supports disabling keepalive completely. + KeepAliveInterval: BindPingInterval, + ConnectionWriteTimeout: 15 * time.Second, + MaxStreamWindowSize: 256 * 1024, + StreamCloseTimeout: 5 * time.Minute, + StreamOpenTimeout: 75 * time.Second, + LogOutput: os.Stderr, + }) + if err != nil { + log.WithError(err).Error("failed to create yamux client session") + + return err + } + + m.Connections.Store(key, session) + + if size := m.Connections.Size(key); size > 1 { + log.WithFields(log.Fields{ + "key": key, + "size": size, + }).Warning("Multiple connections stored for the same identifier.") + } + + m.DialerKeepAliveCallback(key) + + go func() { + for { + select { + // NOTE: Ping is also important to keep the underlying WebSocket connection alive and avoid NAT timeouts. + case <-time.After(BindPingInterval): + if _, err := session.Ping(); err != nil { + log.WithFields(log.Fields{ + "key": key, + }).WithError(err).Error("failed to ping yamux session") + + m.Connections.Delete(key, session) + m.DialerDoneCallback(key) + + return + } + + m.DialerKeepAliveCallback(key) + + continue + case <-session.CloseChan(): + m.Connections.Delete(key, session) + m.DialerDoneCallback(key) + + return + } + } + }() + + return nil +} + +// TransportVersion protocol version identifiers used when dialing a device. 
+type TransportVersion byte
+
+const (
+	// TransportVersionUnknown is used when the transport version could not be determined.
+	TransportVersionUnknown TransportVersion = 0
+	// TransportVersion1 is the legacy transport using revdial over HTTP.
+	TransportVersion1 TransportVersion = 1
+	// TransportVersion2 is the current transport using yamux multiplexing.
+	TransportVersion2 TransportVersion = 2
+)
+
+// Dial tries to find a connection by its key and dials it.
+//
+// It returns the connection, its version ([TransportVersion1] or [TransportVersion2]) and an error.
+func (m *Manager) Dial(ctx context.Context, key string) (net.Conn, TransportVersion, error) {
+	loaded, ok := m.Connections.Load(key)
+	if !ok {
+		return nil, TransportVersionUnknown, ErrNoConnection
+	}
+
+	if size := m.Connections.Size(key); size > 1 {
+		log.WithFields(log.Fields{
+			"key": key,
+			"size": size,
+		}).Warning("Multiple connections found for the same identifier during reverse tunnel dialing.")
+	}
+
+	if dialer, ok := loaded.(*revdial.Dialer); ok {
+		log.WithFields(log.Fields{
+			"key": key,
+			"version": "v1",
+		}).Debug("using v1 dialer for reverse tunnel dialing")
+
+		conn, err := dialer.Dial(ctx)
+		if err != nil {
+			log.WithFields(log.Fields{
+				"key": key,
+				"version": "v1",
+			}).WithError(err).Error("failed to dial reverse connection")
+
+			return nil, TransportVersionUnknown, err
+		}
+
+		return conn, TransportVersion1, nil
+	}
+
+	if session, ok := loaded.(*yamux.Session); ok {
+		log.WithFields(log.Fields{
+			"key": key,
+			"version": "v2",
+		}).Debug("using v2 connection for reverse tunnel dialing")
+
+		conn, err := session.Open()
+		if err != nil {
+			log.WithFields(log.Fields{
+				"key": key,
+				"version": "v2",
+			}).WithError(err).Error("failed to open yamux stream for reverse connection")
+			return nil, TransportVersionUnknown, err
+		}
+		return conn, TransportVersion2, nil
+	}
+
+	return nil, TransportVersionUnknown, ErrNoConnection
+}
diff --git a/ssh/pkg/dialer/protocols.go b/ssh/pkg/dialer/protocols.go
new file mode 100644
index 00000000000..cba9269708f --- /dev/null +++ b/ssh/pkg/dialer/protocols.go @@ -0,0 +1,12 @@ +package dialer + +// Multistream protocol identifiers used when negotiating per-stream +// application protocols over a V2 yamux connection. +// +// The agent and server must keep these values in sync. Changing a value +// is a wire incompatible change. +const ( + ProtoSSHOpen = "/ssh/open/1.0.0" + ProtoSSHClose = "/ssh/close/1.0.0" + ProtoHTTPProxy = "/http/proxy/1.0.0" +) diff --git a/pkg/connman/syncslicemap.go b/ssh/pkg/dialer/syncslicemap.go similarity index 99% rename from pkg/connman/syncslicemap.go rename to ssh/pkg/dialer/syncslicemap.go index f37e1383aac..07aa92a5504 100644 --- a/pkg/connman/syncslicemap.go +++ b/ssh/pkg/dialer/syncslicemap.go @@ -1,4 +1,4 @@ -package connman +package dialer import "sync" diff --git a/pkg/connman/syncslicemap_test.go b/ssh/pkg/dialer/syncslicemap_test.go similarity index 99% rename from pkg/connman/syncslicemap_test.go rename to ssh/pkg/dialer/syncslicemap_test.go index 7414f9f9e8c..11344aa9223 100644 --- a/pkg/connman/syncslicemap_test.go +++ b/ssh/pkg/dialer/syncslicemap_test.go @@ -1,4 +1,4 @@ -package connman +package dialer import ( "testing" diff --git a/ssh/pkg/dialer/target.go b/ssh/pkg/dialer/target.go new file mode 100644 index 00000000000..70533fa982c --- /dev/null +++ b/ssh/pkg/dialer/target.go @@ -0,0 +1,126 @@ +package dialer + +import ( + "bufio" + "encoding/json" + "fmt" + "io" + "net" + "net/http" + "strconv" + + "github.com/multiformats/go-multistream" + log "github.com/sirupsen/logrus" +) + +type Target interface { + prepare(conn net.Conn, version TransportVersion) (net.Conn, error) +} + +// SSHOpenTarget prepares a connection for initiating a new SSH session +// with the agent. 
+type SSHOpenTarget struct{ SessionID string } + +func (t SSHOpenTarget) prepare(conn net.Conn, version TransportVersion) (net.Conn, error) { // nolint:ireturn + switch version { + case TransportVersion1: + log.Debug("preparing SSH open target for transport version 1") + + req, _ := http.NewRequest(http.MethodGet, fmt.Sprintf("/ssh/%s", t.SessionID), nil) + if err := req.Write(conn); err != nil { + log.Errorf("failed to write HTTP request: %v", err) + + return nil, err + } + case TransportVersion2: + log.Debug("preparing SSH open target for transport version 2") + + if err := multistream.SelectProtoOrFail(ProtoSSHOpen, conn); err != nil { + return nil, err + } + if err := json.NewEncoder(conn).Encode(map[string]string{"id": t.SessionID}); err != nil { + return nil, err + } + default: + return nil, fmt.Errorf("unsupported transport version: %d", version) + } + + return conn, nil +} + +// SSHCloseTarget prepares a connection to request closing an existing SSH session. +type SSHCloseTarget struct{ SessionID string } + +func (t SSHCloseTarget) prepare(conn net.Conn, version TransportVersion) (net.Conn, error) { // nolint:ireturn + switch version { + case TransportVersion1: + req, _ := http.NewRequest(http.MethodGet, fmt.Sprintf("/ssh/close/%s", t.SessionID), nil) + if err := req.Write(conn); err != nil { + return nil, err + } + case TransportVersion2: + if err := multistream.SelectProtoOrFail(ProtoSSHClose, conn); err != nil { + return nil, err + } + if err := json.NewEncoder(conn).Encode(map[string]string{"id": t.SessionID}); err != nil { + return nil, err + } + default: + return nil, fmt.Errorf("unsupported transport version: %d", version) + } + + return conn, nil +} + +// HTTPProxyTarget prepares a connection for proxying HTTP traffic to a +// device web endpoint. After preparation the caller should write the +// final HTTP request (with rewritten Host + URL) directly to the +// returned connection. 
+type HTTPProxyTarget struct { + RequestID string + Host string + Port int +} + +func (t HTTPProxyTarget) prepare(conn net.Conn, version TransportVersion) (net.Conn, error) { // nolint:ireturn + switch version { + case TransportVersion1: + // Write initial handshake request and expect 200 OK. + handshakeReq, _ := http.NewRequest(http.MethodConnect, fmt.Sprintf("/http/proxy/%s:%d", t.Host, t.Port), nil) + if err := handshakeReq.Write(conn); err != nil { + return nil, err + } + resp, err := http.ReadResponse(bufio.NewReader(conn), handshakeReq) + if err != nil { + return nil, err + } + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("http proxy handshake failed: %s", resp.Status) + } + case TransportVersion2: + if err := multistream.SelectProtoOrFail(ProtoHTTPProxy, conn); err != nil { + return nil, err + } + if err := json.NewEncoder(conn).Encode(map[string]string{ + "id": t.RequestID, + "host": t.Host, + "port": strconv.Itoa(t.Port), + }); err != nil { + return nil, err + } + result := map[string]string{} + + // NOTE: limit the size of the response to avoid DoS via large payloads. + const Limit = 512 + if err := json.NewDecoder(io.LimitReader(conn, Limit)).Decode(&result); err != nil { + return nil, err + } + if result["status"] != "ok" { + return nil, fmt.Errorf("http proxy negotiation failed: %s", result["message"]) + } + default: + return nil, fmt.Errorf("unsupported transport version: %d", version) + } + + return conn, nil +} diff --git a/ssh/pkg/dialer/throttle.go b/ssh/pkg/dialer/throttle.go new file mode 100644 index 00000000000..327caf6a548 --- /dev/null +++ b/ssh/pkg/dialer/throttle.go @@ -0,0 +1,247 @@ +package dialer + +import ( + "context" + "errors" + "io" + "net" + "sync" + "time" + + "golang.org/x/time/rate" +) + +// ErrNegativeLimit is returned when attempting to set a negative limit. +var ErrNegativeLimit = errors.New("negative throttle limit") + +// Option configures a Throttler. 
+type Option func(*Throttler) + +// WithReadLimit sets the read bytes-per-second limit and burst. +// If bps <= 0 => unlimited. If burst <=0 it defaults to bps. +func WithReadLimit(bps int, burst int) Option { + return func(t *Throttler) { + t.setLimiter(&t.readMu, &t.readLimiter, bps, burst) + } +} + +// WithWriteLimit sets the write bytes-per-second limit and burst. +// If bps <= 0 => unlimited. If burst <=0 it defaults to bps. +func WithWriteLimit(bps int, burst int) Option { + return func(t *Throttler) { + t.setLimiter(&t.writeMu, &t.writeLimiter, bps, burst) + } +} + +// Throttler wraps an underlying io.Reader / io.Writer (optionally both) and +// enforces directional byte-per-second limits using token buckets. +// It is safe for concurrent use of Read and Write. +type Throttler struct { + // Underlying read side (may be nil if only writing). + R io.Reader + // Underlying write side (may be nil if only reading). + W io.Writer + + readMu sync.RWMutex + readLimiter *rate.Limiter + + writeMu sync.RWMutex + writeLimiter *rate.Limiter +} + +func NewThrottler(r io.Reader, w io.Writer, opts ...Option) *Throttler { + t := &Throttler{R: r, W: w} + + for _, o := range opts { + o(t) + } + + return t +} + +// setLimiter (internal) creates or clears a limiter based on bps. +func (t *Throttler) setLimiter(mu *sync.RWMutex, lim **rate.Limiter, bps int, burst int) { + mu.Lock() + defer mu.Unlock() + + if bps <= 0 { + *lim = nil + + return + } + + if burst <= 0 { + burst = bps + } + + *lim = rate.NewLimiter(rate.Limit(bps), burst) +} + +// UpdateReadLimit dynamically changes the read limit. +func (t *Throttler) UpdateReadLimit(bps int, burst int) error { + if bps < 0 || burst < 0 { + return ErrNegativeLimit + } + + t.setLimiter(&t.readMu, &t.readLimiter, bps, burst) + + return nil +} + +// UpdateWriteLimit dynamically changes the write limit. 
+func (t *Throttler) UpdateWriteLimit(bps int, burst int) error { + if bps < 0 || burst < 0 { + return ErrNegativeLimit + } + + t.setLimiter(&t.writeMu, &t.writeLimiter, bps, burst) + + return nil +} + +// Read implements io.Reader with throttling. +func (t *Throttler) Read(p []byte) (int, error) { + if t.R == nil { + return 0, errors.New("read not supported (nil underlying Reader)") + } + + lim := t.getReadLimiter() + + if lim == nil { + return t.R.Read(p) + } + + maxChunk := lim.Burst() + if maxChunk <= 0 { + maxChunk = 32 * 1024 + } + + total := 0 + for total < len(p) { + remaining := len(p) - total + chunk := min(remaining, maxChunk) + + if err := lim.WaitN(context.Background(), chunk); err != nil { + if total > 0 { + return total, err + } + + return 0, err + } + + n, err := t.R.Read(p[total : total+chunk]) + total += n + if err != nil || n == 0 { + return total, err + } + + if n < chunk { + break + } + } + + return total, nil +} + +// Write implements io.Writer with throttling. +func (t *Throttler) Write(p []byte) (int, error) { + if t.W == nil { + return 0, errors.New("write not supported (nil underlying Writer)") + } + + lim := t.getWriteLimiter() + + if lim == nil { + return t.W.Write(p) + } + + maxChunk := lim.Burst() + if maxChunk <= 0 { + maxChunk = 32 * 1024 + } + + total := 0 + for total < len(p) { + remaining := len(p) - total + chunk := min(remaining, maxChunk) + + if err := lim.WaitN(context.Background(), chunk); err != nil { + if total > 0 { + return total, err + } + + return 0, err + } + + n, err := t.W.Write(p[total : total+chunk]) + total += n + if err != nil || n == 0 { + return total, err + } + + if n < chunk { + break + } + } + + return total, nil +} + +// Helper getters with read locks for concurrency. 
+func (t *Throttler) getReadLimiter() *rate.Limiter { + t.readMu.RLock() + defer t.readMu.RUnlock() + + return t.readLimiter +} + +func (t *Throttler) getWriteLimiter() *rate.Limiter { + t.writeMu.RLock() + defer t.writeMu.RUnlock() + + return t.writeLimiter +} + +type ConnThrottler struct { + Conn net.Conn + Throttler *Throttler +} + +func (c *ConnThrottler) Close() error { + return c.Conn.Close() +} + +func (c *ConnThrottler) LocalAddr() net.Addr { + return c.Conn.LocalAddr() +} + +func (c *ConnThrottler) Read(b []byte) (n int, err error) { + return c.Throttler.Read(b) +} + +func (c *ConnThrottler) RemoteAddr() net.Addr { + return c.Conn.RemoteAddr() +} + +func (c *ConnThrottler) SetDeadline(t time.Time) error { + return c.Conn.SetDeadline(t) +} + +func (c *ConnThrottler) SetReadDeadline(t time.Time) error { + return c.Conn.SetReadDeadline(t) +} + +func (c *ConnThrottler) SetWriteDeadline(t time.Time) error { + return c.Conn.SetWriteDeadline(t) +} + +func (c *ConnThrottler) Write(b []byte) (n int, err error) { + return c.Throttler.Write(b) +} + +func NewConnThrottler(conn net.Conn, readBps, readBurst, writeBps, writeBurst int) net.Conn { + return &ConnThrottler{ + Conn: conn, + Throttler: NewThrottler(conn, conn, WithReadLimit(readBps, readBurst), WithWriteLimit(writeBps, writeBurst)), + } +} diff --git a/ssh/pkg/dialer/throttle_test.go b/ssh/pkg/dialer/throttle_test.go new file mode 100644 index 00000000000..b337879a4d3 --- /dev/null +++ b/ssh/pkg/dialer/throttle_test.go @@ -0,0 +1,147 @@ +package dialer + +import ( + "bytes" + "io" + "net" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func expectedMinDuration(total, bps, burst int) time.Duration { + if bps <= 0 { + return 0 + } + + remaining := total - burst + if remaining <= 0 { + return 0 + } + + secs := float64(remaining) / float64(bps) + + return time.Duration(secs * float64(time.Second)) +} + +func TestThrottler_TableDriven(t *testing.T) { + cases := []struct { + name string + run 
func(t *testing.T) + }{ + { + name: "UnlimitedReadFast", + run: func(t *testing.T) { + data := bytes.Repeat([]byte("x"), 1024) + r := bytes.NewReader(data) + + th := NewThrottler(r, nil) // no limits + + buf := make([]byte, len(data)) + start := time.Now() + n, err := th.Read(buf) + dur := time.Since(start) + + assert.Truef(t, err == nil || err == io.EOF, "unexpected read error: %v", err) + assert.Equal(t, len(data), n, "read bytes mismatch") + assert.LessOrEqual(t, dur, 100*time.Millisecond, "unlimited read took too long") + }, + }, + { + name: "NegativeLimitValidation", + run: func(t *testing.T) { + th := NewThrottler(nil, nil) + err := th.UpdateReadLimit(-1, 1) + assert.Equal(t, ErrNegativeLimit, err) + err = th.UpdateWriteLimit(-1, 1) + assert.Equal(t, ErrNegativeLimit, err) + }, + }, + { + name: "ReadRateEnforced", + run: func(t *testing.T) { + total := 200 + bps := 50 + burst := 10 + + data := bytes.Repeat([]byte("r"), total) + r := bytes.NewReader(data) + th := NewThrottler(r, nil, WithReadLimit(bps, burst)) + + buf := make([]byte, total) + start := time.Now() + n, err := th.Read(buf) + dur := time.Since(start) + + assert.Truef(t, err == nil || err == io.EOF, "unexpected read error: %v", err) + assert.Equal(t, total, n, "read bytes mismatch") + + expect := expectedMinDuration(total, bps, burst) + // allow 20% timing slack for scheduler and test flakiness + slack := expect / 5 + assert.Truef(t, dur+slack >= expect, "read duration = %v; want at least ~%v (with slack %v)", dur, expect, slack) + }, + }, + { + name: "WriteRateEnforced", + run: func(t *testing.T) { + total := 200 + bps := 50 + burst := 10 + + var bufOut bytes.Buffer + th := NewThrottler(nil, &bufOut, WithWriteLimit(bps, burst)) + + data := bytes.Repeat([]byte("w"), total) + start := time.Now() + n, err := th.Write(data) + dur := time.Since(start) + + assert.NoError(t, err, "unexpected write error") + assert.Equal(t, total, n, "written bytes mismatch") + + expect := expectedMinDuration(total, bps, 
burst) + slack := expect / 5 + assert.Truef(t, dur+slack >= expect, "write duration = %v; want at least ~%v (with slack %v)", dur, expect, slack) + }, + }, + { + name: "ConnThrottlerPassthrough", + run: func(t *testing.T) { + c1, c2 := net.Pipe() + t.Cleanup(func() { c1.Close(); c2.Close() }) + + // Wrap c2 with unlimited throttler + thrConn := NewConnThrottler(c2, 0, 0, 0, 0) + + // write from c1, read from thrConn + msg := []byte("hello-throttle") + + done := make(chan error, 1) + go func() { + defer c1.Close() + _, err := c1.Write(msg) + done <- err + }() + + // read on wrapped conn + got := make([]byte, len(msg)) + n, err := thrConn.Read(got) + assert.Truef(t, err == nil || err == io.EOF, "conn read error: %v", err) + assert.Equal(t, len(msg), n, "conn read bytes mismatch") + assert.Equal(t, msg, got, "conn read data mismatch") + + // ensure writer had no error + err = <-done + assert.NoError(t, err, "writer error") + }, + }, + } + + for _, tc := range cases { + t.Run(tc.name, func(t *testing.T) { + tc.run(t) + }) + } +} diff --git a/ssh/pkg/flow/flow.go b/ssh/pkg/flow/flow.go deleted file mode 100644 index 455d69add55..00000000000 --- a/ssh/pkg/flow/flow.go +++ /dev/null @@ -1,107 +0,0 @@ -package flow - -import ( - "io" - - log "github.com/sirupsen/logrus" - gossh "golang.org/x/crypto/ssh" -) - -func finish(stream interface{}) error { - if c, ok := stream.(io.Closer); ok { - if err := c.Close(); err != nil && err != io.EOF { - log.WithError(err).Error("failed to close stream") - - return err - } - } - - return nil -} - -type Flow struct { - Stdin io.WriteCloser - Stdout io.Reader - Stderr io.Reader -} - -// NewFlow creates a new Flow from an Connect's session. -// -// It receives a *ssh.Session to be piped into Stdin, Stdout and Stderr. -// -// It returns a *Flow and an error if any piped try failed. 
-func NewFlow(session *gossh.Session) (*Flow, error) { - stdin, err := session.StdinPipe() - if err != nil { - return nil, err - } - - stdout, err := session.StdoutPipe() - if err != nil { - return nil, err - } - - stderr, err := session.StderrPipe() - if err != nil { - return nil, err - } - - return &Flow{Stdin: stdin, Stdout: stdout, Stderr: stderr}, nil -} - -// PipeIn pipes a session to Flow Stdin. -// -// It receives an io.Reader to be read and a channel to inform if an error occurs while copying. -// -// After copy is code, it trys to close Flow Stdin. -func (f *Flow) PipeIn(client io.Reader, done chan bool) { - if _, err := io.Copy(f.Stdin, client); err != nil && err != io.EOF { - log.WithError(err).Error("failed to copy from client to Stdin") - - done <- false - - return - } - - f.Close() - - done <- true -} - -// PipeOut Pipe pipes a Flow Stdout to a session. -// -// It receives an io.Writer to be written and a channel to inform if an error occurs while copying. -func (f *Flow) PipeOut(client io.Writer, done chan bool) { - if _, err := io.Copy(client, f.Stdout); err != nil && err != io.EOF { - log.WithError(err).Error("failed to copy from Stdout to client") - - done <- false - - return - } - - done <- true -} - -// PipeErr pipes a Flow Stderr to a session. -// -// It receives an io.Writer to be written and a channel to inform if an error occurs while copying. -func (f *Flow) PipeErr(client io.Writer, done chan bool) { - if _, err := io.Copy(client, f.Stderr); err != nil && err != io.EOF { - log.WithError(err).Error("failed to copy from Stderr to client") - - done <- false - - return - } - - done <- true -} - -// Close closes all piped flows. -// -// It returns error if any of piped stream return an error. -func (f *Flow) Close() error { - // NOTICE: We don't close Stdout and Stderr because they aren't implementations of io.Closer. 
- return finish(f.Stdin) -} diff --git a/ssh/pkg/flow/flow_test.go b/ssh/pkg/flow/flow_test.go deleted file mode 100644 index 30fec9670e7..00000000000 --- a/ssh/pkg/flow/flow_test.go +++ /dev/null @@ -1,169 +0,0 @@ -package flow - -import ( - "errors" - "io" - "testing" - - "github.com/shellhub-io/shellhub/pkg/mocks" - "github.com/stretchr/testify/assert" -) - -func TestPipeIn(t *testing.T) { - cases := []struct { - description string - client io.Reader - setup func() io.Reader - flow *Flow - expected bool - }{ - { - description: "fails when there's an error on client read", - client: mocks.NewReader().OnRead(0, errors.New("read: error")), - flow: &Flow{ - Stdin: mocks.NewWriteCloser().OnWrite(5, nil), - }, - expected: false, - }, - { - description: "fails when there's an error on standard input write", - client: mocks.NewReader().OnRead(5, nil).OnRead(5, nil).EOF(), - flow: &Flow{ - Stdin: mocks.NewWriteCloser().OnWrite(0, errors.New("write: error")), - }, - expected: false, - }, - { - description: "succeeds when both client read and standard input write operations are successful", - client: mocks.NewReader().OnRead(5, nil).EOF(), - flow: &Flow{ - Stdin: mocks.NewWriteCloser().OnWrite(5, nil), - }, - expected: true, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - done := make(chan bool) - - go tc.flow.PipeIn(tc.client, done) - assert.Equal(t, tc.expected, <-done) - }) - } -} - -func TestPipeOut(t *testing.T) { - cases := []struct { - description string - client io.Writer - flow *Flow - expected bool - }{ - { - description: "fails when there's an error on client write", - client: mocks.NewWriteCloser().OnWrite(0, errors.New("write: error")), - flow: &Flow{ - Stdout: mocks.NewReader().OnRead(10, nil).EOF(), - }, - expected: false, - }, - { - description: "fails when there's an error on standard output read", - client: mocks.NewWriteCloser().OnWrite(0, nil), - flow: &Flow{ - Stdout: mocks.NewReader().OnRead(0, errors.New("read: 
error")).EOF(), - }, - expected: false, - }, - { - description: "succeeds when both client write and standard output read operations are successful", - client: mocks.NewWriteCloser().OnWrite(5, nil), - flow: &Flow{ - Stdout: mocks.NewReader().OnRead(5, nil).EOF(), - }, - expected: true, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - done := make(chan bool) - - go tc.flow.PipeOut(tc.client, done) - assert.Equal(t, tc.expected, <-done) - }) - } -} - -func TestPipeErr(t *testing.T) { - cases := []struct { - description string - client io.Writer - flow *Flow - expected bool - }{ - { - description: "fails when there's an error on client write", - client: mocks.NewWriteCloser().OnWrite(0, errors.New("write: error")), - flow: &Flow{ - Stderr: mocks.NewReader().OnRead(10, nil).EOF(), - }, - expected: false, - }, - { - description: "fails when there's an error on standard error read", - client: mocks.NewWriteCloser().OnWrite(0, nil), - flow: &Flow{ - Stderr: mocks.NewReader().OnRead(0, errors.New("read: error")).EOF(), - }, - expected: false, - }, - { - description: "succeeds when both client write and standard error read operations are successful", - client: mocks.NewWriteCloser().OnWrite(5, nil), - flow: &Flow{ - Stderr: mocks.NewReader().OnRead(5, nil).EOF(), - }, - expected: true, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - done := make(chan bool) - - go tc.flow.PipeErr(tc.client, done) - assert.Equal(t, tc.expected, <-done) - }) - } -} - -func TestClose(t *testing.T) { - cases := []struct { - description string - flow *Flow - expected error - }{ - { - description: "fails when there's an error on standard input close", - flow: &Flow{ - Stdin: mocks.NewWriteCloser().OnClose(errors.New("close: error")), - }, - expected: errors.New("close: error"), - }, - { - description: "succeeds when standard input close operation is successful", - flow: &Flow{ - Stdin: 
mocks.NewWriteCloser().OnClose(nil), - }, - expected: nil, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - assert.Equal(t, tc.expected, tc.flow.Close()) - }) - } -} diff --git a/ssh/pkg/magickey/magickey.go b/ssh/pkg/magickey/magickey.go index 754aa0f5261..c6c9786b06b 100644 --- a/ssh/pkg/magickey/magickey.go +++ b/ssh/pkg/magickey/magickey.go @@ -1,3 +1,4 @@ +// Package magickey provides RSA key generation for ShellHub SSH service. package magickey import ( @@ -8,22 +9,13 @@ import ( log "github.com/sirupsen/logrus" ) -var lock = &sync.Mutex{} - -var magicKey *rsa.PrivateKey - -func GetRerefence() *rsa.PrivateKey { - if magicKey == nil { - lock.Lock() - defer lock.Unlock() - - key, err := rsa.GenerateKey(rand.Reader, 2048) - if err != nil { - log.WithError(err).Fatal() - } - - magicKey = key +// GetReference returns a singleton RSA private key for ShellHub SSH service. +// The key is generated once and reused across all subsequent calls. +var GetReference = sync.OnceValue(func() *rsa.PrivateKey { + key, err := rsa.GenerateKey(rand.Reader, 2048) + if err != nil { + log.WithError(err).Fatal() } - return magicKey -} + return key +}) diff --git a/ssh/pkg/magickey/magickey_test.go b/ssh/pkg/magickey/magickey_test.go new file mode 100644 index 00000000000..fd471885d8d --- /dev/null +++ b/ssh/pkg/magickey/magickey_test.go @@ -0,0 +1,87 @@ +package magickey + +import ( + "crypto/rsa" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestGetReference(t *testing.T) { + tests := []struct { + name string + test func(t *testing.T) + }{ + { + name: "success when called twice returns singleton key", + test: func(t *testing.T) { + key1 := GetReference() + key2 := GetReference() + assert.Same(t, key1, key2) + }, + }, + { + name: "success when key is valid RSA 2048", + test: func(t *testing.T) { + key := GetReference() + assert.NotNil(t, key) + assert.Equal(t, 2048, key.N.BitLen()) + assert.NotNil(t, key.PublicKey) + 
assert.Equal(t, 2048, key.PublicKey.N.BitLen()) + }, + }, + { + name: "success when key is usable for operations", + test: func(t *testing.T) { + key := GetReference() + assert.NotNil(t, key.Primes) + assert.Len(t, key.Primes, 2) + assert.NotNil(t, key.Precomputed) + }, + }, + { + name: "success when multiple calls return same key", + test: func(t *testing.T) { + keys := make([]*rsa.PrivateKey, 10) + for i := 0; i < 10; i++ { + keys[i] = GetReference() + } + firstKey := keys[0] + for _, key := range keys { + assert.Same(t, firstKey, key) + } + }, + }, + } + + for _, tc := range tests { + t.Run(tc.name, tc.test) + } +} + +func TestGetReference_Concurrency(t *testing.T) { + t.Run("success when called concurrently returns singleton", func(t *testing.T) { + const numGoroutines = 100 + keys := make(chan *rsa.PrivateKey, numGoroutines) + for i := 0; i < numGoroutines; i++ { + go func() { + keys <- GetReference() + }() + } + collectedKeys := make([]*rsa.PrivateKey, numGoroutines) + for i := 0; i < numGoroutines; i++ { + collectedKeys[i] = <-keys + } + firstKey := collectedKeys[0] + for _, key := range collectedKeys { + assert.Same(t, firstKey, key) + } + }) +} + +func BenchmarkGetReference(b *testing.B) { + b.ResetTimer() + for i := 0; i < b.N; i++ { + _ = GetReference() + } +} diff --git a/ssh/pkg/metadata/constants.go b/ssh/pkg/metadata/constants.go deleted file mode 100644 index 0bbb96799dd..00000000000 --- a/ssh/pkg/metadata/constants.go +++ /dev/null @@ -1,51 +0,0 @@ -package metadata - -const ( - // authentication is the key to store and restore the authentication method. - authentication = "authentication" - - // Password is the key to store and restore the password from the context. - password = "password" - - // fingerprint is the key to store and restore the public key from the context. - fingerprint = "public_key" - - // api is the key to store and restore an instance of internal api client. 
- api = "api" - - // lookup is the key to store and restore the lookup from the context. - lookup = "lookup" - - // request is the key to store and restore the request type from the context. - request = "request_type" - - // device is the key to store and restore the device from the context. - device = "device" - - // sshid is the key to store and restore the sshid from the context. - sshid = "sshid" - - // agent is the key to store and restore the agent from the context. - agent = "agent" - - // established is the key to store and restore the established state from the context. - established = "established" - - // tag is the key to store and restore the tag from the context. - // - // tag is the device name or the sshid. - tag = "tag" -) - -type AuthenticationMethod int - -const ( - // InvalidAuthenticationMethod represents an invalid InvalidAuthenticationMethod - InvalidAuthenticationMethod AuthenticationMethod = iota - - // PasswordAuthenticationMethod represents the password authentication method. - PasswordAuthenticationMethod - - // PublicKeyAuthenticationMethod represents the public key authentication method. - PublicKeyAuthenticationMethod -) diff --git a/ssh/pkg/metadata/doc.go b/ssh/pkg/metadata/doc.go deleted file mode 100644 index cddd604ba07..00000000000 --- a/ssh/pkg/metadata/doc.go +++ /dev/null @@ -1,10 +0,0 @@ -// Package metadata provides a secure way to store and retrieve data based on a session context, -// preventing race conditions. Each session maintains its own context and won't be overridden by -// another session. -// -// Functions prefixed with `Store` are used to save data, while those prefixed with `MaybeStore` -// are conditional stores. You can also use `Restore` to retrieve stored data. -// -// Additionally, metadata offers a mock implementation for testing the app without requiring a -// real session context. 
-package metadata diff --git a/ssh/pkg/metadata/metadata.go b/ssh/pkg/metadata/metadata.go deleted file mode 100644 index 4a2af323a15..00000000000 --- a/ssh/pkg/metadata/metadata.go +++ /dev/null @@ -1,153 +0,0 @@ -package metadata - -import ( - gliderssh "github.com/gliderlabs/ssh" - "github.com/shellhub-io/shellhub/pkg/api/internalclient" - "github.com/shellhub-io/shellhub/pkg/models" - "github.com/shellhub-io/shellhub/ssh/pkg/target" - gossh "golang.org/x/crypto/ssh" -) - -type backend struct{} - -var ( - _ Metadata = (*backend)(nil) // ensures that backend implements Metadata - bd Metadata -) - -func init() { - bd = &backend{} -} - -// SetBackend sets the backend for metadata functions -func SetBackend(backend Metadata) { - bd = backend -} - -type Metadata interface { - RestoreRequest(ctx gliderssh.Context) string - RestoreAuthenticationMethod(ctx gliderssh.Context) AuthenticationMethod - RestorePassword(ctx gliderssh.Context) string - RestoreFingerprint(ctx gliderssh.Context) string - RestoreTarget(ctx gliderssh.Context) *target.Target - RestoreAPI(ctx gliderssh.Context) internalclient.Client - RestoreLookup(ctx gliderssh.Context) map[string]string - RestoreDevice(ctx gliderssh.Context) *models.Device - RestoreAgentConn(ctx gliderssh.Context) *gossh.Client - RestoreEstablished(ctx gliderssh.Context) bool - StoreRequest(ctx gliderssh.Context, value string) - StoreAuthenticationMethod(ctx gliderssh.Context, method AuthenticationMethod) - StorePassword(ctx gliderssh.Context, value string) - MaybeStoreSSHID(ctx gliderssh.Context, value string) string - MaybeStoreFingerprint(ctx gliderssh.Context, value string) string - MaybeStoreTarget(ctx gliderssh.Context, sshid string) (*target.Target, error) - MaybeSetAPI(ctx gliderssh.Context, client internalclient.Client) internalclient.Client - MaybeStoreLookup(ctx gliderssh.Context, tag *target.Target, api internalclient.Client) (map[string]string, error) - MaybeStoreDevice(ctx gliderssh.Context, lookup map[string]string, 
api internalclient.Client) (*models.Device, []error) - MaybeStoreAgentConn(ctx gliderssh.Context, client *gossh.Client) *gossh.Client - MaybeStoreEstablished(ctx gliderssh.Context, value bool) bool -} - -// RestoreRequest restores the request type from context as metadata. -func RestoreRequest(ctx gliderssh.Context) string { - return bd.RestoreRequest(ctx) -} - -// RestoreAuthenticationMethod restores the authentication method from context as metadata. -func RestoreAuthenticationMethod(ctx gliderssh.Context) AuthenticationMethod { - return bd.RestoreAuthenticationMethod(ctx) -} - -// RestorePassword restores the password from context as metadata. -func RestorePassword(ctx gliderssh.Context) string { - return bd.RestorePassword(ctx) -} - -// RestoreFingerprint restores the fingerprint from context as metadata. -func RestoreFingerprint(ctx gliderssh.Context) string { - return bd.RestoreFingerprint(ctx) -} - -// RestoreTarget restores the target from context as metadata. -func RestoreTarget(ctx gliderssh.Context) *target.Target { - return bd.RestoreTarget(ctx) -} - -// RestoreAPI restores the API client from context as metadata. -func RestoreAPI(ctx gliderssh.Context) internalclient.Client { - return bd.RestoreAPI(ctx) -} - -// RestoreLookup restores the lookup from context as metadata. -func RestoreLookup(ctx gliderssh.Context) map[string]string { - return bd.RestoreLookup(ctx) -} - -// RestoreDevice restores the device from context as metadata. -func RestoreDevice(ctx gliderssh.Context) *models.Device { - return bd.RestoreDevice(ctx) -} - -// RestoreAgentConn restores the agent from context as metadata. -func RestoreAgentConn(ctx gliderssh.Context) *gossh.Client { - return bd.RestoreAgentConn(ctx) -} - -// RestoreEstablished restores the connection established status between server and agent from context as metadata. 
-func RestoreEstablished(ctx gliderssh.Context) bool { - return bd.RestoreEstablished(ctx) -} - -// StoreRequest stores the request type in the context as metadata. -func StoreRequest(ctx gliderssh.Context, value string) { - bd.StoreRequest(ctx, value) -} - -// StoreAuthenticationMethod stores the authentication method in the context/ as metadata. -func StoreAuthenticationMethod(ctx gliderssh.Context, method AuthenticationMethod) { - bd.StoreAuthenticationMethod(ctx, method) -} - -// StorePassword stores the password in the context as metadata. -func StorePassword(ctx gliderssh.Context, value string) { - bd.StorePassword(ctx, value) -} - -// MaybeStoreSSHID stores the SSHID in the context as metadata if is not set yet. -func MaybeStoreSSHID(ctx gliderssh.Context, value string) string { - return bd.MaybeStoreSSHID(ctx, value) -} - -// MaybeStoreFingerprint stores the fingerprint in the context as metadata if is not set yet. -func MaybeStoreFingerprint(ctx gliderssh.Context, value string) string { - return bd.MaybeStoreFingerprint(ctx, value) -} - -// MaybeStoreTarget stores the target in the context as metadata if is not set yet. -func MaybeStoreTarget(ctx gliderssh.Context, sshid string) (*target.Target, error) { - return bd.MaybeStoreTarget(ctx, sshid) -} - -func MaybeSetAPI(ctx gliderssh.Context, client internalclient.Client) internalclient.Client { - return bd.MaybeSetAPI(ctx, client) -} - -// MaybeStoreLookup stores the lookup in the context as metadata if is not set yet. -func MaybeStoreLookup(ctx gliderssh.Context, tag *target.Target, api internalclient.Client) (map[string]string, error) { - return bd.MaybeStoreLookup(ctx, tag, api) -} - -// MaybeStoreDevice stores the device in the context as metadata if is not set yet. 
-func MaybeStoreDevice(ctx gliderssh.Context, lookup map[string]string, api internalclient.Client) (*models.Device, []error) { - return bd.MaybeStoreDevice(ctx, lookup, api) -} - -// MaybeStoreAgentConn stores the agent in the context as metadata if is not set yet. -func MaybeStoreAgentConn(ctx gliderssh.Context, client *gossh.Client) *gossh.Client { - return bd.MaybeStoreAgentConn(ctx, client) -} - -// MaybeStoreEstablished stores the connection established status between server and agent in the context as metadata if is not set yet. -func MaybeStoreEstablished(ctx gliderssh.Context, value bool) bool { - return bd.MaybeStoreEstablished(ctx, value) -} diff --git a/ssh/pkg/metadata/mocks/metadata.go b/ssh/pkg/metadata/mocks/metadata.go deleted file mode 100644 index cef20f39b59..00000000000 --- a/ssh/pkg/metadata/mocks/metadata.go +++ /dev/null @@ -1,355 +0,0 @@ -// Code generated by mockery v2.32.4. DO NOT EDIT. - -package mocks - -import ( - internalclient "github.com/shellhub-io/shellhub/pkg/api/internalclient" - metadata "github.com/shellhub-io/shellhub/ssh/pkg/metadata" - cryptossh "golang.org/x/crypto/ssh" - - mock "github.com/stretchr/testify/mock" - - models "github.com/shellhub-io/shellhub/pkg/models" - - ssh "github.com/gliderlabs/ssh" - - target "github.com/shellhub-io/shellhub/ssh/pkg/target" -) - -// Metadata is an autogenerated mock type for the Metadata type -type Metadata struct { - mock.Mock -} - -// MaybeSetAPI provides a mock function with given fields: ctx, client -func (_m *Metadata) MaybeSetAPI(ctx ssh.Context, client internalclient.Client) internalclient.Client { - ret := _m.Called(ctx, client) - - var r0 internalclient.Client - if rf, ok := ret.Get(0).(func(ssh.Context, internalclient.Client) internalclient.Client); ok { - r0 = rf(ctx, client) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(internalclient.Client) - } - } - - return r0 -} - -// MaybeStoreAgentConn provides a mock function with given fields: ctx, client -func (_m 
*Metadata) MaybeStoreAgentConn(ctx ssh.Context, client *cryptossh.Client) *cryptossh.Client { - ret := _m.Called(ctx, client) - - var r0 *cryptossh.Client - if rf, ok := ret.Get(0).(func(ssh.Context, *cryptossh.Client) *cryptossh.Client); ok { - r0 = rf(ctx, client) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*cryptossh.Client) - } - } - - return r0 -} - -// MaybeStoreDevice provides a mock function with given fields: ctx, lookup, api -func (_m *Metadata) MaybeStoreDevice(ctx ssh.Context, lookup map[string]string, api internalclient.Client) (*models.Device, []error) { - ret := _m.Called(ctx, lookup, api) - - var r0 *models.Device - var r1 []error - if rf, ok := ret.Get(0).(func(ssh.Context, map[string]string, internalclient.Client) (*models.Device, []error)); ok { - return rf(ctx, lookup, api) - } - if rf, ok := ret.Get(0).(func(ssh.Context, map[string]string, internalclient.Client) *models.Device); ok { - r0 = rf(ctx, lookup, api) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.Device) - } - } - - if rf, ok := ret.Get(1).(func(ssh.Context, map[string]string, internalclient.Client) []error); ok { - r1 = rf(ctx, lookup, api) - } else { - if ret.Get(1) != nil { - r1 = ret.Get(1).([]error) - } - } - - return r0, r1 -} - -// MaybeStoreEstablished provides a mock function with given fields: ctx, value -func (_m *Metadata) MaybeStoreEstablished(ctx ssh.Context, value bool) bool { - ret := _m.Called(ctx, value) - - var r0 bool - if rf, ok := ret.Get(0).(func(ssh.Context, bool) bool); ok { - r0 = rf(ctx, value) - } else { - r0 = ret.Get(0).(bool) - } - - return r0 -} - -// MaybeStoreFingerprint provides a mock function with given fields: ctx, value -func (_m *Metadata) MaybeStoreFingerprint(ctx ssh.Context, value string) string { - ret := _m.Called(ctx, value) - - var r0 string - if rf, ok := ret.Get(0).(func(ssh.Context, string) string); ok { - r0 = rf(ctx, value) - } else { - r0 = ret.Get(0).(string) - } - - return r0 -} - -// MaybeStoreLookup 
provides a mock function with given fields: ctx, tag, api -func (_m *Metadata) MaybeStoreLookup(ctx ssh.Context, tag *target.Target, api internalclient.Client) (map[string]string, error) { - ret := _m.Called(ctx, tag, api) - - var r0 map[string]string - var r1 error - if rf, ok := ret.Get(0).(func(ssh.Context, *target.Target, internalclient.Client) (map[string]string, error)); ok { - return rf(ctx, tag, api) - } - if rf, ok := ret.Get(0).(func(ssh.Context, *target.Target, internalclient.Client) map[string]string); ok { - r0 = rf(ctx, tag, api) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(map[string]string) - } - } - - if rf, ok := ret.Get(1).(func(ssh.Context, *target.Target, internalclient.Client) error); ok { - r1 = rf(ctx, tag, api) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// MaybeStoreSSHID provides a mock function with given fields: ctx, value -func (_m *Metadata) MaybeStoreSSHID(ctx ssh.Context, value string) string { - ret := _m.Called(ctx, value) - - var r0 string - if rf, ok := ret.Get(0).(func(ssh.Context, string) string); ok { - r0 = rf(ctx, value) - } else { - r0 = ret.Get(0).(string) - } - - return r0 -} - -// MaybeStoreTarget provides a mock function with given fields: ctx, sshid -func (_m *Metadata) MaybeStoreTarget(ctx ssh.Context, sshid string) (*target.Target, error) { - ret := _m.Called(ctx, sshid) - - var r0 *target.Target - var r1 error - if rf, ok := ret.Get(0).(func(ssh.Context, string) (*target.Target, error)); ok { - return rf(ctx, sshid) - } - if rf, ok := ret.Get(0).(func(ssh.Context, string) *target.Target); ok { - r0 = rf(ctx, sshid) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*target.Target) - } - } - - if rf, ok := ret.Get(1).(func(ssh.Context, string) error); ok { - r1 = rf(ctx, sshid) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// RestoreAPI provides a mock function with given fields: ctx -func (_m *Metadata) RestoreAPI(ctx ssh.Context) internalclient.Client { - ret := 
_m.Called(ctx) - - var r0 internalclient.Client - if rf, ok := ret.Get(0).(func(ssh.Context) internalclient.Client); ok { - r0 = rf(ctx) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(internalclient.Client) - } - } - - return r0 -} - -// RestoreAgentConn provides a mock function with given fields: ctx -func (_m *Metadata) RestoreAgentConn(ctx ssh.Context) *cryptossh.Client { - ret := _m.Called(ctx) - - var r0 *cryptossh.Client - if rf, ok := ret.Get(0).(func(ssh.Context) *cryptossh.Client); ok { - r0 = rf(ctx) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*cryptossh.Client) - } - } - - return r0 -} - -// RestoreAuthenticationMethod provides a mock function with given fields: ctx -func (_m *Metadata) RestoreAuthenticationMethod(ctx ssh.Context) metadata.AuthenticationMethod { - ret := _m.Called(ctx) - - var r0 metadata.AuthenticationMethod - if rf, ok := ret.Get(0).(func(ssh.Context) metadata.AuthenticationMethod); ok { - r0 = rf(ctx) - } else { - r0 = ret.Get(0).(metadata.AuthenticationMethod) - } - - return r0 -} - -// RestoreDevice provides a mock function with given fields: ctx -func (_m *Metadata) RestoreDevice(ctx ssh.Context) *models.Device { - ret := _m.Called(ctx) - - var r0 *models.Device - if rf, ok := ret.Get(0).(func(ssh.Context) *models.Device); ok { - r0 = rf(ctx) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.Device) - } - } - - return r0 -} - -// RestoreEstablished provides a mock function with given fields: ctx -func (_m *Metadata) RestoreEstablished(ctx ssh.Context) bool { - ret := _m.Called(ctx) - - var r0 bool - if rf, ok := ret.Get(0).(func(ssh.Context) bool); ok { - r0 = rf(ctx) - } else { - r0 = ret.Get(0).(bool) - } - - return r0 -} - -// RestoreFingerprint provides a mock function with given fields: ctx -func (_m *Metadata) RestoreFingerprint(ctx ssh.Context) string { - ret := _m.Called(ctx) - - var r0 string - if rf, ok := ret.Get(0).(func(ssh.Context) string); ok { - r0 = rf(ctx) - } else { - r0 = 
ret.Get(0).(string) - } - - return r0 -} - -// RestoreLookup provides a mock function with given fields: ctx -func (_m *Metadata) RestoreLookup(ctx ssh.Context) map[string]string { - ret := _m.Called(ctx) - - var r0 map[string]string - if rf, ok := ret.Get(0).(func(ssh.Context) map[string]string); ok { - r0 = rf(ctx) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(map[string]string) - } - } - - return r0 -} - -// RestorePassword provides a mock function with given fields: ctx -func (_m *Metadata) RestorePassword(ctx ssh.Context) string { - ret := _m.Called(ctx) - - var r0 string - if rf, ok := ret.Get(0).(func(ssh.Context) string); ok { - r0 = rf(ctx) - } else { - r0 = ret.Get(0).(string) - } - - return r0 -} - -// RestoreRequest provides a mock function with given fields: ctx -func (_m *Metadata) RestoreRequest(ctx ssh.Context) string { - ret := _m.Called(ctx) - - var r0 string - if rf, ok := ret.Get(0).(func(ssh.Context) string); ok { - r0 = rf(ctx) - } else { - r0 = ret.Get(0).(string) - } - - return r0 -} - -// RestoreTarget provides a mock function with given fields: ctx -func (_m *Metadata) RestoreTarget(ctx ssh.Context) *target.Target { - ret := _m.Called(ctx) - - var r0 *target.Target - if rf, ok := ret.Get(0).(func(ssh.Context) *target.Target); ok { - r0 = rf(ctx) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*target.Target) - } - } - - return r0 -} - -// StoreAuthenticationMethod provides a mock function with given fields: ctx, method -func (_m *Metadata) StoreAuthenticationMethod(ctx ssh.Context, method metadata.AuthenticationMethod) { - _m.Called(ctx, method) -} - -// StorePassword provides a mock function with given fields: ctx, value -func (_m *Metadata) StorePassword(ctx ssh.Context, value string) { - _m.Called(ctx, value) -} - -// StoreRequest provides a mock function with given fields: ctx, value -func (_m *Metadata) StoreRequest(ctx ssh.Context, value string) { - _m.Called(ctx, value) -} - -// NewMetadata creates a new instance of 
Metadata. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. -func NewMetadata(t interface { - mock.TestingT - Cleanup(func()) -}) *Metadata { - mock := &Metadata{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/ssh/pkg/metadata/restore.go b/ssh/pkg/metadata/restore.go deleted file mode 100644 index 2f2202c7895..00000000000 --- a/ssh/pkg/metadata/restore.go +++ /dev/null @@ -1,103 +0,0 @@ -package metadata - -import ( - gliderssh "github.com/gliderlabs/ssh" - "github.com/shellhub-io/shellhub/pkg/api/internalclient" - "github.com/shellhub-io/shellhub/pkg/models" - "github.com/shellhub-io/shellhub/ssh/pkg/target" - gossh "golang.org/x/crypto/ssh" -) - -func restore(ctx gliderssh.Context, key string) interface{} { - return ctx.Value(key) -} - -func (*backend) RestoreRequest(ctx gliderssh.Context) string { - value := restore(ctx, request) - if value == nil { - return "" - } - - return value.(string) -} - -func (*backend) RestoreAuthenticationMethod(ctx gliderssh.Context) AuthenticationMethod { - value := restore(ctx, authentication) - if value == nil { - return InvalidAuthenticationMethod - } - - return value.(AuthenticationMethod) -} - -func (*backend) RestorePassword(ctx gliderssh.Context) string { - value := restore(ctx, password) - if value == nil { - return "" - } - - return value.(string) -} - -func (*backend) RestoreFingerprint(ctx gliderssh.Context) string { - value := restore(ctx, fingerprint) - if value == nil { - return "" - } - - return value.(string) -} - -func (*backend) RestoreTarget(ctx gliderssh.Context) *target.Target { - value := restore(ctx, tag) - if value == nil { - return nil - } - - return value.(*target.Target) -} - -func (*backend) RestoreAPI(ctx gliderssh.Context) internalclient.Client { - value := restore(ctx, api) - if value == nil { - return nil - } - - return 
value.(internalclient.Client) -} - -func (*backend) RestoreLookup(ctx gliderssh.Context) map[string]string { - value := restore(ctx, lookup) - if value == nil { - return nil - } - - return value.(map[string]string) -} - -func (*backend) RestoreDevice(ctx gliderssh.Context) *models.Device { - value := restore(ctx, device) - if value == nil { - return nil - } - - return value.(*models.Device) -} - -func (*backend) RestoreAgentConn(ctx gliderssh.Context) *gossh.Client { - value := restore(ctx, agent) - if value == nil { - return nil - } - - return value.(*gossh.Client) -} - -func (*backend) RestoreEstablished(ctx gliderssh.Context) bool { - value := restore(ctx, established) - if value == nil { - return false - } - - return value.(bool) -} diff --git a/ssh/pkg/metadata/restore_test.go b/ssh/pkg/metadata/restore_test.go deleted file mode 100644 index c4b7658ea15..00000000000 --- a/ssh/pkg/metadata/restore_test.go +++ /dev/null @@ -1,750 +0,0 @@ -package metadata - -import ( - "testing" - "time" - - gliderssh "github.com/gliderlabs/ssh" - "github.com/shellhub-io/shellhub/pkg/api/internalclient" - "github.com/shellhub-io/shellhub/pkg/models" - "github.com/shellhub-io/shellhub/ssh/pkg/sshsrvtest" - "github.com/shellhub-io/shellhub/ssh/pkg/target" - "github.com/stretchr/testify/assert" - gossh "golang.org/x/crypto/ssh" -) - -// **NOTICE**: -// Each test case has a `setup` method responsible for creating and running the server. -// In these cases, we need to expose the session's context outside the handler. -// To achieve this, we pass a reference to an existing context to write on. 
- -func TestRestore(t *testing.T) { - cases := []struct { - description string - setup func(ctx *gliderssh.Context) *sshsrvtest.Conn - expected string - }{ - { - description: "succeeds", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - s.Context().SetValue(request, "exec") - *ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: "exec", - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - var ctx gliderssh.Context - - srv := tc.setup(&ctx) - - srv.Start() - defer srv.Teardown() - - assert.NoError(t, srv.Agent.Run("")) - assert.Equal(t, tc.expected, restore(ctx, request)) - }) - } -} - -func TestRestoreRequest(t *testing.T) { - cases := []struct { - description string - setup func(ctx *gliderssh.Context) *sshsrvtest.Conn - expected string - }{ - { - description: "fails when request is not set", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - *ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: "", - }, - { - description: "succeeds in restoring request type", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - s.Context().SetValue(request, "exec") - *ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: "exec", - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - var ctx gliderssh.Context 
- - srv := tc.setup(&ctx) - - srv.Start() - defer srv.Teardown() - - assert.NoError(t, srv.Agent.Run("")) - assert.Equal(t, tc.expected, RestoreRequest(ctx)) - }) - } -} - -func TestRestoreAuthenticationMethod(t *testing.T) { - cases := []struct { - description string - setup func(ctx *gliderssh.Context) *sshsrvtest.Conn - expected AuthenticationMethod - }{ - { - description: "fails when authentication method is not set", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - *ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: InvalidAuthenticationMethod, - }, - { - description: "succeeds in restoring authentication method", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - s.Context().SetValue(authentication, PasswordAuthenticationMethod) - *ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: PasswordAuthenticationMethod, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - var ctx gliderssh.Context - - srv := tc.setup(&ctx) - - srv.Start() - defer srv.Teardown() - - assert.NoError(t, srv.Agent.Run("")) - assert.Equal(t, tc.expected, RestoreAuthenticationMethod(ctx)) - }) - } -} - -func TestRestorePassword(t *testing.T) { - cases := []struct { - description string - setup func(ctx *gliderssh.Context) *sshsrvtest.Conn - expected string - }{ - { - description: "fails when password is not set", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - *ctx = s.Context() - }, - 
}, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: "", - }, - { - description: "succeeds in restoring password", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - s.Context().SetValue(password, "secret") - *ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: "secret", - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - var ctx gliderssh.Context - - srv := tc.setup(&ctx) - - srv.Start() - defer srv.Teardown() - - assert.NoError(t, srv.Agent.Run("")) - assert.Equal(t, tc.expected, RestorePassword(ctx)) - }) - } -} - -func TestRestoreFingerprint(t *testing.T) { - cases := []struct { - description string - setup func(ctx *gliderssh.Context) *sshsrvtest.Conn - expected string - }{ - { - description: "fails when fingerprint is not set", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - *ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: "", - }, - { - description: "succeeds in restoring fingerprint", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - s.Context().SetValue(fingerprint, "fingerprint") - *ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: "fingerprint", - }, - } - - for _, tc := range 
cases { - t.Run(tc.description, func(t *testing.T) { - var ctx gliderssh.Context - - srv := tc.setup(&ctx) - - srv.Start() - defer srv.Teardown() - - assert.NoError(t, srv.Agent.Run("")) - assert.Equal(t, tc.expected, RestoreFingerprint(ctx)) - }) - } -} - -func TestRestoreTarget(t *testing.T) { - cases := []struct { - description string - setup func(ctx *gliderssh.Context) *sshsrvtest.Conn - expected *target.Target - }{ - { - description: "fails when target is not set", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - *ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: nil, - }, - { - description: "succeeds in restoring target", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - s.Context().SetValue(tag, &target.Target{ - Username: "username", - Data: "namespace.00-00-00-00-00-00@localhost", - }) - *ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: &target.Target{ - Username: "username", - Data: "namespace.00-00-00-00-00-00@localhost", - }, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - var ctx gliderssh.Context - - srv := tc.setup(&ctx) - - srv.Start() - defer srv.Teardown() - - assert.NoError(t, srv.Agent.Run("")) - assert.Equal(t, tc.expected, RestoreTarget(ctx)) - }) - } -} - -func TestRestoreAPI(t *testing.T) { - cases := []struct { - description string - setup func(ctx *gliderssh.Context) *sshsrvtest.Conn - expected internalclient.Client - }{ - { - description: "fails when api is not set", - setup: func(ctx *gliderssh.Context) 
*sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - *ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: nil, - }, - { - description: "succeeds in restoring api", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - s.Context().SetValue(api, *new(internalclient.Client)) - *ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: *new(internalclient.Client), - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - var ctx gliderssh.Context - - srv := tc.setup(&ctx) - - srv.Start() - defer srv.Teardown() - - assert.NoError(t, srv.Agent.Run("")) - assert.Equal(t, tc.expected, RestoreAPI(ctx)) - }) - } -} - -func TestRestoreLookup(t *testing.T) { - cases := []struct { - description string - setup func(ctx *gliderssh.Context) *sshsrvtest.Conn - expected map[string]string - }{ - { - description: "fails when lookup is not set", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - *ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: nil, - }, - { - description: "succeeds in restoring lookup", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - s.Context().SetValue(lookup, map[string]string{"foo": "bar"}) - *ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: 
[]gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: map[string]string{"foo": "bar"}, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - var ctx gliderssh.Context - - srv := tc.setup(&ctx) - - srv.Start() - defer srv.Teardown() - - assert.NoError(t, srv.Agent.Run("")) - assert.Equal(t, tc.expected, RestoreLookup(ctx)) - }) - } -} - -func TestRestoreDevice(t *testing.T) { - cases := []struct { - description string - setup func(ctx *gliderssh.Context) *sshsrvtest.Conn - expected *models.Device - }{ - { - description: "fails when device is not set", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - *ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: nil, - }, - { - description: "succeeds in restoring device", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - s.Context().SetValue( - device, - &models.Device{ - CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", - Name: "hostname", - Identity: &models.DeviceIdentity{MAC: "mac"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: true, - Namespace: "namespace", - Status: "accepted", - RemoteAddr: "", - Position: nil, - Tags: []string{"tag1"}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, - }, - ) - *ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - 
HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: &models.Device{ - CreatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - StatusUpdatedAt: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - LastSeen: time.Date(2023, 1, 1, 12, 0, 0, 0, time.UTC), - UID: "2300230e3ca2f637636b4d025d2235269014865db5204b6d115386cbee89809c", - Name: "hostname", - Identity: &models.DeviceIdentity{MAC: "mac"}, - Info: nil, - PublicKey: "", - TenantID: "00000000-0000-4000-0000-000000000000", - Online: true, - Namespace: "namespace", - Status: "accepted", - RemoteAddr: "", - Position: nil, - Tags: []string{"tag1"}, - PublicURL: false, - PublicURLAddress: "", - Acceptable: false, - }, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - var ctx gliderssh.Context - - srv := tc.setup(&ctx) - - srv.Start() - defer srv.Teardown() - - assert.NoError(t, srv.Agent.Run("")) - assert.Equal(t, tc.expected, RestoreDevice(ctx)) - }) - } -} - -func TestRestoreAgent(t *testing.T) { - cases := []struct { - description string - setup func(ctx *gliderssh.Context) *sshsrvtest.Conn - expected *gossh.Client - }{ - { - description: "fails when agent is not set", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - *ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: nil, - }, - { - description: "succeeds in restoring agent", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - s.Context().SetValue(agent, &gossh.Client{}) - *ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: 
&gossh.Client{}, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - var ctx gliderssh.Context - - srv := tc.setup(&ctx) - - srv.Start() - defer srv.Teardown() - - assert.NoError(t, srv.Agent.Run("")) - assert.Equal(t, tc.expected, RestoreAgentConn(ctx)) - }) - } -} - -func TestRestoreEstablished(t *testing.T) { - cases := []struct { - description string - setup func(ctx *gliderssh.Context) *sshsrvtest.Conn - expected bool - }{ - { - description: "fails when established is not set", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - *ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: false, - }, - { - description: "succeeds in restoring established", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - s.Context().SetValue(established, true) - *ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: true, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - var ctx gliderssh.Context - - srv := tc.setup(&ctx) - - srv.Start() - defer srv.Teardown() - - assert.NoError(t, srv.Agent.Run("")) - assert.Equal(t, tc.expected, RestoreEstablished(ctx)) - }) - } -} diff --git a/ssh/pkg/metadata/store.go b/ssh/pkg/metadata/store.go deleted file mode 100644 index 010bf3d83e2..00000000000 --- a/ssh/pkg/metadata/store.go +++ /dev/null @@ -1,115 +0,0 @@ -package metadata - -import ( - gliderssh "github.com/gliderlabs/ssh" - "github.com/shellhub-io/shellhub/pkg/api/internalclient" - "github.com/shellhub-io/shellhub/pkg/models" - 
"github.com/shellhub-io/shellhub/ssh/pkg/target" - gossh "golang.org/x/crypto/ssh" -) - -// store stores a value into a context. -func store(ctx gliderssh.Context, key string, value interface{}) { - ctx.SetValue(key, value) -} - -func (*backend) StoreRequest(ctx gliderssh.Context, value string) { - store(ctx, request, value) -} - -func (*backend) StoreAuthenticationMethod(ctx gliderssh.Context, method AuthenticationMethod) { - store(ctx, authentication, method) -} - -func (*backend) StorePassword(ctx gliderssh.Context, value string) { - store(ctx, password, value) -} - -// maybeStore stores a value into a context if it does not exist yet. If the value already exists, it will be returned. -// -// Its return must be cast. -func maybeStore(ctx gliderssh.Context, key string, value interface{}) interface{} { - if got := restore(ctx, key); got != nil { - return got - } - - store(ctx, key, value) - - return value -} - -func (*backend) MaybeStoreSSHID(ctx gliderssh.Context, value string) string { - return maybeStore(ctx, sshid, value).(string) -} - -func (*backend) MaybeStoreFingerprint(ctx gliderssh.Context, value string) string { - return maybeStore(ctx, fingerprint, value).(string) -} - -func (*backend) MaybeStoreTarget(ctx gliderssh.Context, sshid string) (*target.Target, error) { - if got := restore(ctx, tag); got != nil { - return got.(*target.Target), nil - } - - value, err := target.NewTarget(sshid) - if err != nil { - return nil, err - } - - return maybeStore(ctx, tag, value).(*target.Target), nil -} - -func (*backend) MaybeSetAPI(ctx gliderssh.Context, client internalclient.Client) internalclient.Client { - value := maybeStore(ctx, api, client) - if value == nil { - return nil - } - - return value.(internalclient.Client) -} - -func (*backend) MaybeStoreLookup(ctx gliderssh.Context, tag *target.Target, api internalclient.Client) (map[string]string, error) { - var value map[string]string - setValue := func(namespace, hostname string) { - value = map[string]string{ 
- "domain": namespace, - "name": hostname, - } - } - if tag.IsSSHID() { - var namespace, hostname string - namespace, hostname, err := tag.SplitSSHID() - if err != nil { - return nil, err - } - - setValue(namespace, hostname) - } else { - var device *models.Device - device, err := api.GetDevice(tag.Data) - if err != nil { - return nil, err - } - - setValue(device.Namespace, device.Name) - } - - return maybeStore(ctx, lookup, value).(map[string]string), nil -} - -func (*backend) MaybeStoreDevice(ctx gliderssh.Context, lookup map[string]string, api internalclient.Client) (*models.Device, []error) { - value, errs := api.DeviceLookup(lookup) - if len(errs) > 0 { - return nil, errs - } - - return maybeStore(ctx, device, value).(*models.Device), nil -} - -func (*backend) MaybeStoreAgentConn(ctx gliderssh.Context, client *gossh.Client) *gossh.Client { - return maybeStore(ctx, agent, client).(*gossh.Client) -} - -func (*backend) MaybeStoreEstablished(ctx gliderssh.Context, value bool) bool { - return maybeStore(ctx, established, value).(bool) -} diff --git a/ssh/pkg/metadata/store_test.go b/ssh/pkg/metadata/store_test.go deleted file mode 100644 index 9052f59a8b5..00000000000 --- a/ssh/pkg/metadata/store_test.go +++ /dev/null @@ -1,569 +0,0 @@ -package metadata - -import ( - "testing" - - gliderssh "github.com/gliderlabs/ssh" - "github.com/shellhub-io/shellhub/ssh/pkg/sshsrvtest" - "github.com/shellhub-io/shellhub/ssh/pkg/target" - "github.com/stretchr/testify/assert" - gossh "golang.org/x/crypto/ssh" -) - -// **NOTICE**: -// Each test case has a `setup` method responsible for creating and running the server. -// In these cases, we need to expose the session's context (and sometimes the error) outside the handler. -// To achieve this, we pass a reference to an existing context to write on. 
- -func TestStore(t *testing.T) { - cases := []struct { - description string - setup func(ctx *gliderssh.Context) *sshsrvtest.Conn - expected string - }{ - { - description: "succeeds in storing key/value", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - *ctx = s.Context() - store(*ctx, "key", "val") - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: "val", - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - var ctx gliderssh.Context - - srv := tc.setup(&ctx) - - srv.Start() - defer srv.Teardown() - - assert.NoError(t, srv.Agent.Run("")) - assert.Equal(t, tc.expected, ctx.Value("key").(string)) - }) - } -} - -func TestStoreRequest(t *testing.T) { - cases := []struct { - description string - setup func(ctx *gliderssh.Context) *sshsrvtest.Conn - expected string - }{ - { - description: "succeeds in storing request type", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - *ctx = s.Context() - StoreRequest(*ctx, "exec") - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: "exec", - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - var ctx gliderssh.Context - - srv := tc.setup(&ctx) - - srv.Start() - defer srv.Teardown() - - assert.NoError(t, srv.Agent.Run("")) - assert.Equal(t, tc.expected, ctx.Value(request).(string)) - }) - } -} - -func TestStoreAuthenticationMethod(t *testing.T) { - cases := []struct { - description string - setup func(ctx *gliderssh.Context) *sshsrvtest.Conn - expected AuthenticationMethod - }{ - { - description: "succeeds in storing 
authentication method", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - *ctx = s.Context() - StoreAuthenticationMethod(*ctx, PasswordAuthenticationMethod) - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: PasswordAuthenticationMethod, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - var ctx gliderssh.Context - - srv := tc.setup(&ctx) - - srv.Start() - defer srv.Teardown() - - assert.NoError(t, srv.Agent.Run("")) - assert.Equal(t, tc.expected, ctx.Value(authentication).(AuthenticationMethod)) - }) - } -} - -func TestStorePassword(t *testing.T) { - cases := []struct { - description string - setup func(ctx *gliderssh.Context) *sshsrvtest.Conn - expected string - }{ - { - description: "succeeds in storing password", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - *ctx = s.Context() - StorePassword(*ctx, "123") - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: "123", - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - var ctx gliderssh.Context - - srv := tc.setup(&ctx) - - srv.Start() - defer srv.Teardown() - - assert.NoError(t, srv.Agent.Run("")) - assert.Equal(t, tc.expected, ctx.Value(password).(string)) - }) - } -} - -func TestMaybeStore(t *testing.T) { - cases := []struct { - description string - setup func(ctx *gliderssh.Context) *sshsrvtest.Conn - expected string - }{ - { - description: "succeeds in storing key/value", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s 
gliderssh.Session) { - *ctx = s.Context() - maybeStore(*ctx, "key", "val") - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: "val", - }, - { - description: "succeeds in retrieving fingerprint when it is already defined", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - s.Context().SetValue("key", "val") - *ctx = s.Context() - maybeStore(*ctx, "key", "other value") - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: "val", - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - var ctx gliderssh.Context - - srv := tc.setup(&ctx) - - srv.Start() - defer srv.Teardown() - - assert.NoError(t, srv.Agent.Run("")) - assert.Equal(t, tc.expected, ctx.Value("key").(string)) - }) - } -} - -func TestMaybeStoreSSHID(t *testing.T) { - cases := []struct { - description string - setup func(ctx *gliderssh.Context) *sshsrvtest.Conn - expected string - }{ - { - description: "succeeds in storing sshid", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - *ctx = s.Context() - MaybeStoreSSHID(*ctx, "namespace.00-00-00-00-00-00@localhost") - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: "namespace.00-00-00-00-00-00@localhost", - }, - { - description: "succeeds in retrieving sshid when it is already defined", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - s.Context().SetValue(sshid, 
"namespace.00-00-00-00-00-00@localhost") - *ctx = s.Context() - MaybeStoreSSHID(*ctx, "other sshid") - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: "namespace.00-00-00-00-00-00@localhost", - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - var ctx gliderssh.Context - - srv := tc.setup(&ctx) - - srv.Start() - defer srv.Teardown() - - assert.NoError(t, srv.Agent.Run("")) - assert.Equal(t, tc.expected, ctx.Value(sshid).(string)) - }) - } -} - -func TestMaybeStoreFingerprint(t *testing.T) { - cases := []struct { - description string - setup func(ctx *gliderssh.Context) *sshsrvtest.Conn - expected string - }{ - { - description: "succeeds in storing fingerprint", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - *ctx = s.Context() - MaybeStoreFingerprint(*ctx, "fingerprint") - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: "fingerprint", - }, - { - description: "succeeds in retrieving fingerprint when it is already defined", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - s.Context().SetValue(fingerprint, "fingerprint") - *ctx = s.Context() - MaybeStoreFingerprint(*ctx, "other value") - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: "fingerprint", - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - var ctx gliderssh.Context - - srv := tc.setup(&ctx) - - srv.Start() - defer srv.Teardown() - - assert.NoError(t, 
srv.Agent.Run("")) - assert.Equal(t, tc.expected, ctx.Value(fingerprint).(string)) - }) - } -} - -func TestMaybeStoreTarget(t *testing.T) { - type Expected struct { - target *target.Target - err error - } - - cases := []struct { - description string - setup func(ctx *gliderssh.Context, err *error) *sshsrvtest.Conn - expected Expected - }{ - { - description: "fails when target is invalid", - setup: func(ctx *gliderssh.Context, err *error) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - *ctx = s.Context() - _, *err = MaybeStoreTarget(*ctx, "username") - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: Expected{ - target: nil, - err: target.ErrSplitTarget, - }, - }, - { - description: "succeeds when target is valid", - setup: func(ctx *gliderssh.Context, err *error) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - *ctx = s.Context() - _, *err = MaybeStoreTarget(*ctx, "username@namespace.00-00-00-00-00-00@localhost") - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: Expected{ - target: &target.Target{ - Username: "username", - Data: "namespace.00-00-00-00-00-00@localhost", - }, - err: nil, - }, - }, - { - description: "succeeds in retrieving target when it is already defined", - setup: func(ctx *gliderssh.Context, err *error) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - s.Context().SetValue(tag, &target.Target{Username: "username", Data: "namespace.00-00-00-00-00-00@localhost"}) - *ctx = s.Context() - _, *err = MaybeStoreTarget(*ctx, "other value") - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - 
gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: Expected{ - target: &target.Target{ - Username: "username", - Data: "namespace.00-00-00-00-00-00@localhost", - }, - err: nil, - }, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - var ctx gliderssh.Context - var err error - - srv := tc.setup(&ctx, &err) - - srv.Start() - defer srv.Teardown() - - assert.NoError(t, srv.Agent.Run("")) - if err != nil { - assert.Equal(t, tc.expected, Expected{nil, err}) - } else { - assert.Equal(t, tc.expected, Expected{ctx.Value(tag).(*target.Target), err}) - } - }) - } -} - -func TestMaybeStoreEstablished(t *testing.T) { - cases := []struct { - description string - setup func(ctx *gliderssh.Context) *sshsrvtest.Conn - expected bool - }{ - { - description: "succeeds in storing established", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - *ctx = s.Context() - MaybeStoreEstablished(*ctx, true) - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: true, - }, - { - description: "succeeds in retrieving established when it is already defined", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - return sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - s.Context().SetValue(established, true) - - *ctx = s.Context() - MaybeStoreEstablished(*ctx, false) - }, - }, - &gossh.ClientConfig{ - User: "user", - Auth: []gossh.AuthMethod{ - gossh.Password("123"), - }, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - }, - expected: true, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - var ctx gliderssh.Context - - srv := tc.setup(&ctx) - - srv.Start() - defer srv.Teardown() - - assert.NoError(t, 
srv.Agent.Run("")) - assert.Equal(t, tc.expected, ctx.Value(established).(bool)) - }) - } -} diff --git a/ssh/pkg/target/target_test.go b/ssh/pkg/target/target_test.go index c947854830b..22c8e619be2 100644 --- a/ssh/pkg/target/target_test.go +++ b/ssh/pkg/target/target_test.go @@ -91,7 +91,7 @@ func TestSplitSSHID(t *testing.T) { expected Expected }{ { - description: "failst when when Data does not contain a dot", + description: "fails when when Data does not contain a dot", target: &Target{ Username: "username", Data: "username@localhost", diff --git a/ssh/pkg/tunnel/tunnel.go b/ssh/pkg/tunnel/tunnel.go deleted file mode 100644 index ea7589a98fc..00000000000 --- a/ssh/pkg/tunnel/tunnel.go +++ /dev/null @@ -1,54 +0,0 @@ -package tunnel - -import ( - "context" - "net" - "net/http" - - "github.com/labstack/echo/v4" - "github.com/shellhub-io/shellhub/pkg/api/internalclient" - "github.com/shellhub-io/shellhub/pkg/httptunnel" - log "github.com/sirupsen/logrus" -) - -type Tunnel struct { - Tunnel *httptunnel.Tunnel - API internalclient.Client -} - -func NewTunnel(connection, dial string) *Tunnel { - tunnel := &Tunnel{ - Tunnel: httptunnel.NewTunnel(connection, dial), - API: internalclient.NewClient(), - } - - tunnel.Tunnel.ConnectionHandler = func(request *http.Request) (string, error) { - return request.Header.Get(internalclient.DeviceUIDHeader), nil - } - tunnel.Tunnel.CloseHandler = func(id string) { - if err := internalclient.NewClient().DevicesOffline(id); err != nil { - log.Error(err) - } - } - tunnel.Tunnel.KeepAliveHandler = func(id string) { - if err := tunnel.API.DevicesHeartbeat(id); err != nil { - log.Error(err) - } - } - - return tunnel -} - -func (t *Tunnel) GetRouter() *echo.Echo { - router, ok := t.Tunnel.Router().(*echo.Echo) - if !ok { - // TODO: should the Connect does not up when this assertion fail? 
- log.Error("type assertion failed") - } - - return router -} - -func (t *Tunnel) Dial(ctx context.Context, id string) (net.Conn, error) { - return t.Tunnel.Dial(ctx, id) -} diff --git a/ssh/refresh.yml b/ssh/refresh.yml index ce7c4750885..9e5f89ab2a7 100644 --- a/ssh/refresh.yml +++ b/ssh/refresh.yml @@ -5,7 +5,7 @@ included_extensions: - .go build_target_path: "" build_path: /go/src/github.com/shellhub-io/shellhub/ssh -build_flags: ["-tags", "internal_api"] +build_flags: ["-tags", "internal_api", "-gcflags=all=-N -l"] build_delay: 200ns binary_name: ssh command_flags: [] diff --git a/ssh/server/auth/doc.go b/ssh/server/auth/doc.go index 266a427c30b..39f709f5d96 100644 --- a/ssh/server/auth/doc.go +++ b/ssh/server/auth/doc.go @@ -1,9 +1,6 @@ // Package auth provides authentication handlers for client connections. // -// This package includes two authentication methods: PasswordHandler and PublicKeyHandler. -// PasswordHandler is the second authentication method tried by the server to connect the client to the agent, -// while PublicKeyHandler is the first authentication method attempted. -// -// The authentication handler receives a session context and attempts to store essential data, such as -// SSHID and target. It returns `true` if the authentication process succeeds and `false` otherwise. +// This package includes two authentication methods: [PasswordHandler] and [PublicKeyHandler]. +// [PasswordHandler] is the second authentication method tried by the server to connect the client to the agent, +// while [PublicKeyHandler] is the first authentication method attempted. 
package auth diff --git a/ssh/server/auth/password.go b/ssh/server/auth/password.go index 864380df60e..7cea216af98 100644 --- a/ssh/server/auth/password.go +++ b/ssh/server/auth/password.go @@ -1,56 +1,42 @@ package auth import ( + "net" + gliderssh "github.com/gliderlabs/ssh" - "github.com/shellhub-io/shellhub/pkg/api/internalclient" - "github.com/shellhub-io/shellhub/ssh/pkg/metadata" + "github.com/shellhub-io/shellhub/ssh/session" log "github.com/sirupsen/logrus" ) // PasswordHandler handles ShellHub client's connection using the password authentication method. -// Password authentication is the second authentication method tried by the server to connect the client to the agent. -// It receives the password from the client and attempts to authenticate it. -// Returns true if the password authentication method is used and false otherwise. -func PasswordHandler(ctx gliderssh.Context, password string) bool { - sshid := metadata.MaybeStoreSSHID(ctx, ctx.User()) - - log.WithFields(log.Fields{"session": ctx.SessionID(), "sshid": sshid}). - Trace("trying to use password authentication") - - tag, err := metadata.MaybeStoreTarget(ctx, sshid) - if err != nil { - log.WithError(err). - WithFields(log.Fields{"session": ctx.SessionID(), "sshid": sshid}). - Error("failed to parse sshid to target") +func PasswordHandler(ctx gliderssh.Context, passwd string) bool { + logger := log.WithFields( + log.Fields{ + "uid": ctx.SessionID(), + "sshid": ctx.User(), + }) - return false - } + logger.Trace("trying to use password authentication") - api := metadata.MaybeSetAPI(ctx, internalclient.NewClient()) + sess, state := session.ObtainSession(ctx) + if state < session.StateEvaluated { + logger.Trace("failed to get the session from context on password handler") - lookup, err := metadata.MaybeStoreLookup(ctx, tag, api) - if err != nil { - log.WithError(err). - WithFields(log.Fields{"session": ctx.SessionID(), "sshid": sshid}). 
- Error("failed to store lookup") + conn, ok := ctx.Value("conn").(net.Conn) + if ok { + conn.Close() + } return false } - _, errs := metadata.MaybeStoreDevice(ctx, lookup, api) - if len(errs) > 0 { - log.WithError(err). - WithFields(log.Fields{"session": ctx.SessionID(), "sshid": sshid}). - Error("failed to store the device") + if err := sess.Auth(ctx, session.AuthPassword(passwd)); err != nil { + logger.Warn("failed to authenticate on device using password") return false } - metadata.StorePassword(ctx, password) - metadata.StoreAuthenticationMethod(ctx, metadata.PasswordAuthenticationMethod) - - log.WithFields(log.Fields{"session": ctx.SessionID(), "sshid": sshid}). - Info("using password authentication method to connect the client to agent") + logger.Info("succeeded to use password authentication.") return true } diff --git a/ssh/server/auth/password_test.go b/ssh/server/auth/password_test.go deleted file mode 100644 index e07683648a7..00000000000 --- a/ssh/server/auth/password_test.go +++ /dev/null @@ -1,219 +0,0 @@ -package auth - -import ( - "errors" - "testing" - - gliderssh "github.com/gliderlabs/ssh" - internalclientMocks "github.com/shellhub-io/shellhub/pkg/api/internalclient/mocks" - "github.com/shellhub-io/shellhub/ssh/pkg/metadata" - metadataMocks "github.com/shellhub-io/shellhub/ssh/pkg/metadata/mocks" - "github.com/shellhub-io/shellhub/ssh/pkg/sshsrvtest" - "github.com/shellhub-io/shellhub/ssh/pkg/target" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/mock" - gossh "golang.org/x/crypto/ssh" -) - -func TestPasswordHandler(t *testing.T) { - cases := []struct { - description string - password string - setup func(ctx *gliderssh.Context) *sshsrvtest.Conn - mocks func(ctx gliderssh.Context) - expected bool - }{ - { - description: "fails when could not store the target", - password: "secret", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - srv := sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - 
*ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: "user@namespace.00-00-00-00-00-00", - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - - return srv - }, - mocks: func(ctx gliderssh.Context) { - metadataMock := new(metadataMocks.Metadata) - metadata.SetBackend(metadataMock) - - metadataMock.On("MaybeStoreSSHID", ctx, "user@namespace.00-00-00-00-00-00"). - Return("user@namespace.00-00-00-00-00-00"). - Once() - - metadataMock.On("MaybeStoreTarget", ctx, "user@namespace.00-00-00-00-00-00"). - Return(nil, errors.New("error")). - Once() - }, - expected: false, - }, - { - description: "fails when could not store the lookup", - password: "secret", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - srv := sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - *ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: "user@namespace.00-00-00-00-00-00", - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - - return srv - }, - mocks: func(ctx gliderssh.Context) { - metadataMock := new(metadataMocks.Metadata) - metadata.SetBackend(metadataMock) - - metadataMock.On("MaybeStoreSSHID", ctx, "user@namespace.00-00-00-00-00-00"). - Return("user@namespace.00-00-00-00-00-00"). - Once() - - tag := &target.Target{Username: "user", Data: "namespace.00-00-00-00-00-00"} - metadataMock.On("MaybeStoreTarget", ctx, "user@namespace.00-00-00-00-00-00"). - Return(tag, nil). - Once() - - api := new(internalclientMocks.Client) - // Since MaybeSetAPI uses `internalclient.NewClient()` as an argument, using `api` here would result in a memory error. - metadataMock.On("MaybeSetAPI", ctx, mock.Anything). - Return(api). - Once() - - // lookup := map[string]string{} - metadataMock.On("MaybeStoreLookup", ctx, tag, api). - Return(nil, errors.New("error")). 
- Once() - }, - expected: false, - }, - { - description: "fails when could not store the device", - password: "secret", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - srv := sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - *ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: "user@namespace.00-00-00-00-00-00", - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - - return srv - }, - mocks: func(ctx gliderssh.Context) { - metadataMock := new(metadataMocks.Metadata) - metadata.SetBackend(metadataMock) - - metadataMock.On("MaybeStoreSSHID", ctx, "user@namespace.00-00-00-00-00-00"). - Return("user@namespace.00-00-00-00-00-00"). - Once() - - tag := &target.Target{Username: "user", Data: "namespace.00-00-00-00-00-00"} - metadataMock.On("MaybeStoreTarget", ctx, "user@namespace.00-00-00-00-00-00"). - Return(tag, nil). - Once() - - api := new(internalclientMocks.Client) - // Since MaybeSetAPI uses `internalclient.NewClient()` as an argument, using `api` here would result in a memory error. - metadataMock.On("MaybeSetAPI", ctx, mock.Anything). - Return(api). - Once() - - lookup := map[string]string{} - metadataMock.On("MaybeStoreLookup", ctx, tag, api). - Return(lookup, nil). - Once() - - metadataMock.On("MaybeStoreDevice", ctx, lookup, api). - Return(nil, []error{errors.New("error")}). - Once() - }, - expected: false, - }, - { - description: "succeeds to authenticate the session", - password: "secret", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - srv := sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - *ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: "user@namespace.00-00-00-00-00-00", - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - - return srv - }, - mocks: func(ctx gliderssh.Context) { - metadataMock := new(metadataMocks.Metadata) - metadata.SetBackend(metadataMock) - - metadataMock.On("MaybeStoreSSHID", ctx, "user@namespace.00-00-00-00-00-00"). 
- Return("user@namespace.00-00-00-00-00-00"). - Once() - - tag := &target.Target{Username: "user", Data: "namespace.00-00-00-00-00-00"} - metadataMock.On("MaybeStoreTarget", ctx, "user@namespace.00-00-00-00-00-00"). - Return(tag, nil). - Once() - - api := new(internalclientMocks.Client) - // Since MaybeSetAPI uses `internalclient.NewClient()` as an argument, using `api` here would result in a memory error. - metadataMock.On("MaybeSetAPI", ctx, mock.Anything). - Return(api). - Once() - - lookup := map[string]string{} - metadataMock.On("MaybeStoreLookup", ctx, tag, api). - Return(lookup, nil). - Once() - - metadataMock.On("MaybeStoreDevice", ctx, lookup, api). - Return(nil, []error{}). - Once() - - metadataMock.On("StorePassword", ctx, "secret") - metadataMock.On("StoreAuthenticationMethod", ctx, metadata.PasswordAuthenticationMethod) - }, - expected: true, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - var ctx gliderssh.Context - - srv := tc.setup(&ctx) - defer srv.Teardown() - - srv.Start() - assert.NoError(t, srv.Agent.Run("cmd")) - - tc.mocks(ctx) - - result := PasswordHandler(ctx, tc.password) - assert.Equal(t, tc.expected, result) - }) - } -} diff --git a/ssh/server/auth/publickey.go b/ssh/server/auth/publickey.go index 453cde63c29..7d9fc28a6dc 100644 --- a/ssh/server/auth/publickey.go +++ b/ssh/server/auth/publickey.go @@ -1,115 +1,44 @@ package auth import ( + "net" + gliderssh "github.com/gliderlabs/ssh" - "github.com/shellhub-io/shellhub/pkg/api/internalclient" - "github.com/shellhub-io/shellhub/ssh/pkg/magickey" - "github.com/shellhub-io/shellhub/ssh/pkg/metadata" + "github.com/shellhub-io/shellhub/ssh/session" log "github.com/sirupsen/logrus" - gossh "golang.org/x/crypto/ssh" + "golang.org/x/crypto/ssh" ) // PublicKeyHandler handles ShellHub client's connection using the public key authentication method. 
-// Public key authentication is the first authentication method tried by the server to connect the client to the agent. -// It receives the public key from the client and attempts to authenticate it. -// Returns true if the public key authentication method is used and false otherwise. func PublicKeyHandler(ctx gliderssh.Context, publicKey gliderssh.PublicKey) bool { - sshid := metadata.MaybeStoreSSHID(ctx, ctx.User()) - fingerprint := metadata.MaybeStoreFingerprint(ctx, gossh.FingerprintLegacyMD5(publicKey)) - - log.WithFields(log.Fields{ - "session": ctx.SessionID(), - "sshid": sshid, - "fingerprint": fingerprint, - }).Trace("trying to use public key authentication") - - tag, err := metadata.MaybeStoreTarget(ctx, sshid) - if err != nil { - log.WithError(err). - WithFields(log.Fields{ - "session": ctx.SessionID(), - "sshid": sshid, - "fingerprint": fingerprint, - }). - Error("failed to parse sshid to target") - - return false - } - - api := metadata.MaybeSetAPI(ctx, internalclient.NewClient()) - - lookup, err := metadata.MaybeStoreLookup(ctx, tag, api) - if err != nil { - log.WithError(err). - WithFields(log.Fields{ - "session": ctx.SessionID(), - "sshid": sshid, - "fingerprint": fingerprint, - }). - Error("failed to store lookup") - - return false - } - - device, errs := metadata.MaybeStoreDevice(ctx, lookup, api) - if len(errs) > 0 { - log.WithError(err). - WithFields(log.Fields{ - "session": ctx.SessionID(), - "sshid": sshid, - "fingerprint": fingerprint, - }). 
- Error("failed to store the device") + logger := log.WithFields( + log.Fields{ + "uid": ctx.SessionID(), + "sshid": ctx.User(), + "key": ssh.MarshalAuthorizedKey(publicKey), + }) + + logger.Trace("trying to use public key authentication") + + sess, state := session.ObtainSession(ctx) + if state < session.StateEvaluated { + logger.Trace("failed to get the session from context on public key handler") + + conn, ok := ctx.Value("conn").(net.Conn) + if ok { + conn.Close() + } return false } - magic, err := gossh.NewPublicKey(&magickey.GetRerefence().PublicKey) - if err != nil { - log.WithError(err). - WithFields(log.Fields{ - "session": ctx.SessionID(), - "sshid": sshid, - "fingerprint": fingerprint, - }). - Error("failed to create a new public key") + if err := sess.Auth(ctx, session.AuthPublicKey(publicKey)); err != nil { + logger.Warn("failed to authenticate on device using public key") return false } - if gossh.FingerprintLegacyMD5(magic) != fingerprint { - if _, err = api.GetPublicKey(fingerprint, device.TenantID); err != nil { - log.WithError(err). - WithFields(log.Fields{ - "session": ctx.SessionID(), - "sshid": sshid, - "fingerprint": fingerprint, - }). - Error("failed to get the existent public key") - - return false - } - - if ok, err := api.EvaluateKey(fingerprint, device, tag.Username); !ok || err != nil { - log.WithError(err). - WithFields(log.Fields{ - "session": ctx.SessionID(), - "sshid": sshid, - "fingerprint": fingerprint, - }). 
- Error("failed to evaluate the key") - - return false - } - } - - metadata.StoreAuthenticationMethod(ctx, metadata.PublicKeyAuthenticationMethod) - - log.WithFields(log.Fields{ - "session": ctx.SessionID(), - "sshid": sshid, - "fingerprint": fingerprint, - }).Info("using public key authentication method to connect the client to agent") + logger.Info("succeeded to use public key authentication.") return true } diff --git a/ssh/server/auth/publickey_test.go b/ssh/server/auth/publickey_test.go deleted file mode 100644 index cc6dd1f9764..00000000000 --- a/ssh/server/auth/publickey_test.go +++ /dev/null @@ -1,368 +0,0 @@ -package auth - -import ( - "crypto/rand" - "crypto/rsa" - "errors" - "testing" - - gliderssh "github.com/gliderlabs/ssh" - internalclientMocks "github.com/shellhub-io/shellhub/pkg/api/internalclient/mocks" - "github.com/shellhub-io/shellhub/pkg/models" - "github.com/shellhub-io/shellhub/ssh/pkg/metadata" - metadataMocks "github.com/shellhub-io/shellhub/ssh/pkg/metadata/mocks" - "github.com/shellhub-io/shellhub/ssh/pkg/sshsrvtest" - "github.com/shellhub-io/shellhub/ssh/pkg/target" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/mock" - gossh "golang.org/x/crypto/ssh" -) - -func generateTestPubKey(t *testing.T) gliderssh.PublicKey { - privateKey, err := rsa.GenerateKey(rand.Reader, 2048) - if err != nil { - t.Fatal(err) - } - - publicRsaKey, err := gossh.NewPublicKey(&privateKey.PublicKey) - if err != nil { - t.Fatal(err) - } - - return publicRsaKey -} - -func TestPublicKeyHandler(t *testing.T) { - cases := []struct { - description string - setup func(ctx *gliderssh.Context) *sshsrvtest.Conn - mocks func(ctx gliderssh.Context) - expected bool - }{ - { - description: "fails when could not store the target", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - srv := sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - *ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: 
"user@namespace.00-00-00-00-00-00", - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - - return srv - }, - mocks: func(ctx gliderssh.Context) { - metadataMock := new(metadataMocks.Metadata) - metadata.SetBackend(metadataMock) - - metadataMock.On("MaybeStoreSSHID", ctx, "user@namespace.00-00-00-00-00-00"). - Return("user@namespace.00-00-00-00-00-00"). - Once() - - metadataMock.On("MaybeStoreFingerprint", ctx, mock.Anything). - Return("fingerprint"). - Once() - - metadataMock.On("MaybeStoreTarget", ctx, "user@namespace.00-00-00-00-00-00"). - Return(nil, errors.New("error")). - Once() - }, - expected: false, - }, - { - description: "fails when could not store the lookup", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - srv := sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - *ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: "user@namespace.00-00-00-00-00-00", - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - - return srv - }, - mocks: func(ctx gliderssh.Context) { - metadataMock := new(metadataMocks.Metadata) - metadata.SetBackend(metadataMock) - - metadataMock.On("MaybeStoreSSHID", ctx, "user@namespace.00-00-00-00-00-00"). - Return("user@namespace.00-00-00-00-00-00"). - Once() - - metadataMock.On("MaybeStoreFingerprint", ctx, mock.Anything). - Return("fingerprint"). - Once() - - tag := &target.Target{Username: "user", Data: "namespace.00-00-00-00-00-00"} - metadataMock.On("MaybeStoreTarget", ctx, "user@namespace.00-00-00-00-00-00"). - Return(tag, nil). - Once() - - api := new(internalclientMocks.Client) - // Since MaybeSetAPI uses `internalclient.NewClient()` as an argument, using `api` here would result in a memory error. - metadataMock.On("MaybeSetAPI", ctx, mock.Anything). - Return(api). - Once() - - // lookup := map[string]string{} - metadataMock.On("MaybeStoreLookup", ctx, tag, api). - Return(nil, errors.New("error")). 
- Once() - }, - expected: false, - }, - { - description: "fails when could not store the device", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - srv := sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - *ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: "user@namespace.00-00-00-00-00-00", - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - - return srv - }, - mocks: func(ctx gliderssh.Context) { - metadataMock := new(metadataMocks.Metadata) - metadata.SetBackend(metadataMock) - - metadataMock.On("MaybeStoreSSHID", ctx, "user@namespace.00-00-00-00-00-00"). - Return("user@namespace.00-00-00-00-00-00"). - Once() - - metadataMock.On("MaybeStoreFingerprint", ctx, mock.Anything). - Return("fingerprint"). - Once() - - tag := &target.Target{Username: "user", Data: "namespace.00-00-00-00-00-00"} - metadataMock.On("MaybeStoreTarget", ctx, "user@namespace.00-00-00-00-00-00"). - Return(tag, nil). - Once() - - api := new(internalclientMocks.Client) - // Since MaybeSetAPI uses `internalclient.NewClient()` as an argument, using `api` here would result in a memory error. - metadataMock.On("MaybeSetAPI", ctx, mock.Anything). - Return(api). - Once() - - lookup := map[string]string{} - metadataMock.On("MaybeStoreLookup", ctx, tag, api). - Return(lookup, nil). - Once() - - metadataMock.On("MaybeStoreDevice", ctx, lookup, api). - Return(nil, []error{errors.New("error")}). 
- Once() - }, - expected: false, - }, - { - description: "fails when could not get the public key", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - srv := sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - *ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: "user@namespace.00-00-00-00-00-00", - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - - return srv - }, - mocks: func(ctx gliderssh.Context) { - metadataMock := new(metadataMocks.Metadata) - metadata.SetBackend(metadataMock) - - metadataMock.On("MaybeStoreSSHID", ctx, "user@namespace.00-00-00-00-00-00"). - Return("user@namespace.00-00-00-00-00-00"). - Once() - - metadataMock.On("MaybeStoreFingerprint", ctx, mock.Anything). - Return("fingerprint"). - Once() - - tag := &target.Target{Username: "user", Data: "namespace.00-00-00-00-00-00"} - metadataMock.On("MaybeStoreTarget", ctx, "user@namespace.00-00-00-00-00-00"). - Return(tag, nil). - Once() - - api := new(internalclientMocks.Client) - // Since MaybeSetAPI uses `internalclient.NewClient()` as an argument, using `api` here would result in a memory error. - metadataMock.On("MaybeSetAPI", ctx, mock.Anything). - Return(api). - Once() - - lookup := map[string]string{} - metadataMock.On("MaybeStoreLookup", ctx, tag, api). - Return(lookup, nil). - Once() - - metadataMock.On("MaybeStoreDevice", ctx, lookup, api). - Return(&models.Device{TenantID: "00000000-0000-4000-0000-000000000000"}, []error{}). - Once() - - api.On("GetPublicKey", "fingerprint", "00000000-0000-4000-0000-000000000000"). - Return(nil, errors.New("error")). 
- Once() - }, - expected: false, - }, - { - description: "fails when could not evaluate the key", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - srv := sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - *ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: "user@namespace.00-00-00-00-00-00", - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - - return srv - }, - mocks: func(ctx gliderssh.Context) { - metadataMock := new(metadataMocks.Metadata) - metadata.SetBackend(metadataMock) - - metadataMock.On("MaybeStoreSSHID", ctx, "user@namespace.00-00-00-00-00-00"). - Return("user@namespace.00-00-00-00-00-00"). - Once() - - metadataMock.On("MaybeStoreFingerprint", ctx, mock.Anything). - Return("fingerprint"). - Once() - - tag := &target.Target{Username: "user", Data: "namespace.00-00-00-00-00-00"} - metadataMock.On("MaybeStoreTarget", ctx, "user@namespace.00-00-00-00-00-00"). - Return(tag, nil). - Once() - - api := new(internalclientMocks.Client) - // Since MaybeSetAPI uses `internalclient.NewClient()` as an argument, using `api` here would result in a memory error. - metadataMock.On("MaybeSetAPI", ctx, mock.Anything). - Return(api). - Once() - - lookup := map[string]string{} - metadataMock.On("MaybeStoreLookup", ctx, tag, api). - Return(lookup, nil). - Once() - - metadataMock.On("MaybeStoreDevice", ctx, lookup, api). - Return(&models.Device{TenantID: "00000000-0000-4000-0000-000000000000"}, []error{}). - Once() - - api.On("GetPublicKey", "fingerprint", "00000000-0000-4000-0000-000000000000"). - Return(nil, nil). - Once() - - api.On("EvaluateKey", "fingerprint", &models.Device{TenantID: "00000000-0000-4000-0000-000000000000"}, "user"). - Return(false, errors.New("error")). 
- Once() - }, - expected: false, - }, - { - description: "succeeds to authenticate the session", - setup: func(ctx *gliderssh.Context) *sshsrvtest.Conn { - srv := sshsrvtest.New( - &gliderssh.Server{ - Handler: func(s gliderssh.Session) { - *ctx = s.Context() - }, - }, - &gossh.ClientConfig{ - User: "user@namespace.00-00-00-00-00-00", - HostKeyCallback: gossh.InsecureIgnoreHostKey(), - }, - ) - - return srv - }, - mocks: func(ctx gliderssh.Context) { - metadataMock := new(metadataMocks.Metadata) - metadata.SetBackend(metadataMock) - - metadataMock.On("MaybeStoreSSHID", ctx, "user@namespace.00-00-00-00-00-00"). - Return("user@namespace.00-00-00-00-00-00"). - Once() - - metadataMock.On("MaybeStoreFingerprint", ctx, mock.Anything). - Return("fingerprint"). - Once() - - tag := &target.Target{Username: "user", Data: "namespace.00-00-00-00-00-00"} - metadataMock.On("MaybeStoreTarget", ctx, "user@namespace.00-00-00-00-00-00"). - Return(tag, nil). - Once() - - api := new(internalclientMocks.Client) - // Since MaybeSetAPI uses `internalclient.NewClient()` as an argument, using `api` here would result in a memory error. - metadataMock.On("MaybeSetAPI", ctx, mock.Anything). - Return(api). - Once() - - lookup := map[string]string{} - metadataMock.On("MaybeStoreLookup", ctx, tag, api). - Return(lookup, nil). - Once() - - metadataMock.On("MaybeStoreDevice", ctx, lookup, api). - Return(&models.Device{TenantID: "00000000-0000-4000-0000-000000000000"}, []error{}). - Once() - - api.On("GetPublicKey", "fingerprint", "00000000-0000-4000-0000-000000000000"). - Return(nil, nil). - Once() - - api.On("EvaluateKey", "fingerprint", &models.Device{TenantID: "00000000-0000-4000-0000-000000000000"}, "user"). - Return(true, nil). 
- Once() - - metadataMock.On("StoreAuthenticationMethod", ctx, metadata.PublicKeyAuthenticationMethod) - }, - expected: true, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - var ctx gliderssh.Context - - srv := tc.setup(&ctx) - defer srv.Teardown() - - srv.Start() - assert.NoError(t, srv.Agent.Run("cmd")) - - tc.mocks(ctx) - - result := PublicKeyHandler(ctx, generateTestPubKey(t)) - assert.Equal(t, tc.expected, result) - }) - } -} diff --git a/ssh/server/channels/channels.go b/ssh/server/channels/channels.go new file mode 100644 index 00000000000..126db75324f --- /dev/null +++ b/ssh/server/channels/channels.go @@ -0,0 +1,16 @@ +package channels + +const ( + // DirectTCPIPChannel is the channel type for direct-tcpip channels like "local port forwarding" and "dynamic + // application-level port forwarding". + // + // Local port forwarding is used to forward a port from the client to the server, and dynamic application-level + // is a method for securely tunneling and routing network traffic through an SSH connection to access remote + // resources as if they were local. + // + // Example of local port forwarding: `ssh -L 8080:localhost:80 user@sshid`. + // + // Example of dynamic application-level port forwarding: `ssh -D 1080 user@sshid`. + DirectTCPIPChannel = "direct-tcpip" + SessionChannel = "session" +) diff --git a/ssh/server/channels/session.go b/ssh/server/channels/session.go new file mode 100644 index 00000000000..84b1dc591cb --- /dev/null +++ b/ssh/server/channels/session.go @@ -0,0 +1,366 @@ +package channels + +import ( + "strings" + "sync" + + gliderssh "github.com/gliderlabs/ssh" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/shellhub-io/shellhub/ssh/session" + log "github.com/sirupsen/logrus" + gossh "golang.org/x/crypto/ssh" +) + +// KeepAliveRequestTypePrefix Through the time, the [KeepAliveRequestType] type sent from agent to server changed its +// name, but always keeping the prefix "keepalive". 
So, to maintain the retro compatibility, we check if this prefix +// exists and perform the necessary operations. +const KeepAliveRequestTypePrefix string = "keepalive" + +const ( + // Once the session has been set up, a program is started at the remote end. The program can be a shell, an + // application program, or a subsystem with a host-independent name. Only one of these requests can succeed per + // channel + // + // https://www.rfc-editor.org/rfc/rfc4254#section-6.5 + ShellRequestType = "shell" + // This message will request that the server start the execution of the given command. The 'command' string may + // contain a path. Normal precautions MUST be taken to prevent the execution of unauthorized commands. + // + // https://www.rfc-editor.org/rfc/rfc4254#section-6.5 + ExecRequestType = "exec" + // This last form executes a predefined subsystem. It is expected that these will include a general file transfer + // mechanism, and possibly other features. Implementations may also allow configuring more such mechanisms. As + // the user's shell is usually used to execute the subsystem, it is advisable for the subsystem protocol to have a + // "magic cookie" at the beginning of the protocol transaction to distinguish it from arbitrary output generated + // by shell initialization scripts, etc. This spurious output from the shell may be filtered out either at the + // server or at the client. + // + // https://www.rfc-editor.org/rfc/rfc4254#section-6.5 + SubsystemRequestType = "subsystem" + // A pseudo-terminal can be allocated for the session by sending the following message. + // + // The 'encoded terminal modes' are described in Section 8. Zero dimension parameters MUST be ignored. The + // character/row dimensions override the pixel dimensions (when nonzero). Pixel dimensions refer to the drawable + // area of the window. 
+ // + // https://www.rfc-editor.org/rfc/rfc4254#section-6.2 + PtyRequestType = "pty-req" + // When the window (terminal) size changes on the client side, it MAY send a message to the other side to inform it + // of the new dimensions. + // + // https://www.rfc-editor.org/rfc/rfc4254#section-6.7 + WindowChangeRequestType = "window-change" + // It is a custom request type that the Agent sends to maintain the session alive, even when no data is sent. + KeepAliveRequestType = KeepAliveRequestTypePrefix + "@shellhub.io" + // When the command running at the other end terminates, the following message can be sent to return the exit + // status of the command. Returning the status is RECOMMENDED. + // + // https://www.rfc-editor.org/rfc/rfc4254#section-6.10 + ExitStatusRequest = "exit-status" + // The remote command may also terminate violently due to a signal. Such a condition can be indicated by the + // following message. A zero 'exit_status' usually means that the command terminated successfully. + // + // https://datatracker.ietf.org/doc/html/rfc4254#section-6.10 + ExitSignalRequest = "exit-signal" +) + +// A client may request agent forwarding for a previously opened session using the following channel request. This +// request is sent after the channel has been opened, but before a [ShellRequestType], command or +// [SubsystemRequestType] has been executed. +// +// https://www.ietf.org/archive/id/draft-miller-ssh-agent-11.html#section-4.1 +const AuthRequestOpenSSHRequest = "auth-agent-req@openssh.com" + +// After a client has requested that a session have agent forwarding enabled, the server later may request a connection +// to the forwarded agent. The server does this by requesting a dedicated channel to communicate with the client's +// agent. +// +// https://www.ietf.org/archive/id/draft-miller-ssh-agent-11.html#section-4.2 +const AuthRequestOpenSSHChannel = "auth-agent@openssh.com" + +// DefaultSessionHandler is the default handler for session's channel. 
+// +// A session is a remote execution of a program. The program may be a shell, an application, a system command, or some +// built-in subsystem. It may or may not have a TTY, and may or may not involve X11 forwarding. +// +// https://www.rfc-editor.org/rfc/rfc4254#section-6 +func DefaultSessionHandler() gliderssh.ChannelHandler { + return func(_ *gliderssh.Server, conn *gossh.ServerConn, newChan gossh.NewChannel, ctx gliderssh.Context) { + sess, _ := session.ObtainSession(ctx) + + go func() { + // NOTE: As [gossh.ServerConn] is shared by all channels calls, close it after a channel close block any + // other channel invocation. To avoid it, we wait for the connection to be closed to finish the session. + conn.Wait() //nolint:errcheck + + sess.Finish() //nolint:errcheck + }() + + logger := log.WithFields( + log.Fields{ + "uid": sess.UID, + "sshid": sess.SSHID, + "device": sess.Device.UID, + "username": sess.Target.Username, + "ip": sess.IPAddress, + }) + + reject := func(err error, msg string) { + logger.WithError(err).Error(msg) + + newChan.Reject(gossh.ConnectionFailed, msg) //nolint:errcheck + } + + logger.Info("session channel started") + defer logger.Info("session channel done") + + seat, err := sess.NewSeat() + if err != nil { + reject(err, "failed to create a new seat on the SSH session") + + return + } + + client, err := sess.NewClientChannel(newChan, seat) + if err != nil { + reject(err, "failed to accept the channel opening") + + return + } + + defer client.Close() + + agent, err := sess.NewAgentChannel(SessionChannel, seat) + if err != nil { + reject(err, "failed to open the session channel on agent") + + return + } + + defer agent.Close() + + var wg sync.WaitGroup + + done := make(chan bool) + + oncePipe := sync.OnceFunc(func() { + go pipe(sess, client.Channel, agent.Channel, seat, done) + }) + + wg.Add(3) + + go func() { + defer wg.Done() + + for { + select { + case <-ctx.Done(): + logger.Info("context has done (global requests)") + + return + case 
req, ok := <-sess.Agent.Requests: + if !ok { + logger.Trace("global requests is closed") + + return + } + + logger.Debugf("global request from agent: %s", req.Type) + + switch { + // NOTE: The Agent sends "keepalive" requests to the server to avoid the Web Socket being closed due + // to inactivity. Through the time, the request type sent from agent to server changed its name, but + // always keeping the prefix "keepalive". So, to maintain the retro compatibility, we check if this + // prefix exists and perform the necessary operations. + case strings.HasPrefix(req.Type, KeepAliveRequestTypePrefix): + if _, err := client.Channel.SendRequest(KeepAliveRequestType, req.WantReply, req.Payload); err != nil { + logger.Error("failed to send the keepalive request received from agent to client") + + return + } + + if err := sess.KeepAlive(ctx); err != nil { + logger.WithError(err).Error("failed to send the API request to inform that the session is open") + + return + } + default: + if req.WantReply { + if err := req.Reply(false, nil); err != nil { + logger.WithError(err).Error(err) + } + } + } + } + } + }() + + go func() { + defer wg.Done() + defer func() { + logger.Debug("agent waiting for data done to close client") + + <-done + client.Close() + }() + + for { + select { + case <-ctx.Done(): + logger.Info("context has done (agent requests)") + + return + case req, ok := <-agent.Requests: + if !ok { + logger.Trace("agent requests is closed") + + return + } + + switch req.Type { + case ExitStatusRequest: + session.Event[models.SSHExitStatus](sess, req.Type, req.Payload, seat) + case ExitSignalRequest: + session.Event[models.SSHSignal](sess, req.Type, req.Payload, seat) + default: + sess.Event(req.Type, req.Payload, seat) + } + + logger.Debugf("request from agent to client: %s", req.Type) + + ok, err := client.Channel.SendRequest(req.Type, req.WantReply, req.Payload) + if err != nil { + logger.WithError(err).Error("failed to send the request from agent to client") + + 
continue + } + + if req.WantReply { + if err := req.Reply(ok, nil); err != nil { + logger.WithError(err).Error(err) + } + } + } + } + }() + + go func() { + defer wg.Done() + + for { + select { + case <-ctx.Done(): + logger.Info("context has done (client requests)") + + return + case req, ok := <-client.Requests: + if !ok { + logger.Trace("client requests is closed") + + return + } + + switch req.Type { + case ShellRequestType: + if seat, ok := sess.Seats.Get(seat); ok && seat.HasPty { + if err := sess.Announce(client.Channel); err != nil { + logger.WithError(err).Warn("failed to get the namespace announcement") + } + } + + sess.Event(req.Type, req.Payload, seat) + case ExecRequestType, SubsystemRequestType: + session.Event[models.SSHCommand](sess, req.Type, req.Payload, seat) + + sess.Type = ExecRequestType + case PtyRequestType: + var pty models.SSHPty + + if err := gossh.Unmarshal(req.Payload, &pty); err != nil { + reject(nil, "failed to recover the session dimensions") + } + + sess.Seats.SetPty(seat, true) + + sess.Event(req.Type, pty, seat) //nolint:errcheck + case WindowChangeRequestType: + var dimensions models.SSHWindowChange + + if err := gossh.Unmarshal(req.Payload, &dimensions); err != nil { + reject(nil, "failed to recover the session dimensions") + } + + sess.Event(req.Type, dimensions, seat) //nolint:errcheck + case AuthRequestOpenSSHRequest: + gliderssh.SetAgentRequested(ctx) + + sess.Event(req.Type, req.Payload, seat) + go func() { + clientConn := ctx.Value(gliderssh.ContextKeyConn).(gossh.Conn) + agentChannels := sess.Agent.Client.HandleChannelOpen(AuthRequestOpenSSHChannel) + + for { + newAgentChannel, ok := <-agentChannels + if !ok { + reject(nil, "channel for agent forwarding done") + + return + } + + agentChannel, agentReqs, err := newAgentChannel.Accept() + if err != nil { + reject(nil, "failed to accept the chanel request from agent on auth request") + + return + } + + defer agentChannel.Close() + go gossh.DiscardRequests(agentReqs) + + 
clientChannel, clientReqs, err := clientConn.OpenChannel(AuthRequestOpenSSHChannel, nil) + if err != nil { + reject(nil, "failed to open the auth request channel from agent to client") + + return + } + + defer clientChannel.Close() + go gossh.DiscardRequests(clientReqs) + + hose(sess, agentChannel, clientChannel) + + logger.WithError(err).Trace("auth request channel piping done") + } + }() + default: + sess.Event(req.Type, req.Payload, seat) + } + + logger.Debugf("request from client to agent: %s", req.Type) + + ok, err := agent.Channel.SendRequest(req.Type, req.WantReply, req.Payload) + if err != nil { + logger.WithError(err).Error("failed to send the request from client to agent") + + continue + } + + if req.WantReply { + if err := req.Reply(ok, nil); err != nil { + logger.WithError(err).Error(err) + } + } + + switch req.Type { + case PtyRequestType, ExecRequestType, SubsystemRequestType: + oncePipe() + } + } + } + }() + + wg.Wait() + + logger.Debug("session done after waiting") + } +} diff --git a/ssh/server/channels/tcpip.go b/ssh/server/channels/tcpip.go index 7b00cf966c6..421ac78b6b3 100644 --- a/ssh/server/channels/tcpip.go +++ b/ssh/server/channels/tcpip.go @@ -4,197 +4,171 @@ import ( "io" "net" "strconv" + "sync" gliderssh "github.com/gliderlabs/ssh" - "github.com/shellhub-io/shellhub/pkg/httptunnel" - "github.com/shellhub-io/shellhub/ssh/pkg/metadata" "github.com/shellhub-io/shellhub/ssh/session" log "github.com/sirupsen/logrus" gossh "golang.org/x/crypto/ssh" ) -const ( - // DirectTCPIPChannel is the channel type for direct-tcpip channels like "local port forwarding" and "dynamic - // application-level port forwarding". - // - // Local port forwarding is used to forward a port from the client to the server, and dynamic application-level - // is a method for securely tunneling and routing network traffic through an SSH connection to access remote - // resources as if they were local. 
- // - // Example of local port forwarding: `ssh -L 8080:localhost:80 user@sshid`. - // - // Example of dynamic application-level port forwarding: `ssh -D 1080 user@sshid`. - DirectTCPIPChannel = "direct-tcpip" -) +// DefaultDirectTCPIPHandler is the channel's handler for direct-tcpip channels like "local port forwarding" and "dynamic +// application-level port forwarding". +func DefaultDirectTCPIPHandler(server *gliderssh.Server, conn *gossh.ServerConn, newChan gossh.NewChannel, ctx gliderssh.Context) { + sess, _ := session.ObtainSession(ctx) + go func() { + // NOTICE: As [gossh.ServerConn] is shared by all channels calls, close it after a channel close block any + // other channel involkation. To avoid it, we wait for the connection be closed to finish the sesison. + conn.Wait() //nolint:errcheck + + sess.Finish() //nolint:errcheck + }() + + log.WithFields(log.Fields{ + "username": sess.Target.Username, + "sshid": sess.Target.Data, + }).Trace("handling direct-tcpip channel") + + type channelData struct { + DestAddr string `json:"dest_addr"` + DestPort uint32 `json:"dest_port"` + OriginAddr string `json:"origin_addr"` + OriginPort uint32 `json:"origin_port"` + } + + data := new(channelData) + if err := gossh.Unmarshal(newChan.ExtraData(), data); err != nil { + newChan.Reject(gossh.ConnectionFailed, "failed to parse forward data: "+err.Error()) //nolint:errcheck + log.WithError(err).WithFields(log.Fields{ + "username": sess.Target.Username, + "sshid": sess.Target.Data, + "origin_port": data.OriginAddr, + "origin_addr": data.OriginPort, + "dest_port": data.DestPort, + "dest_addr": data.DestAddr, + }).Error("failed to parse forward data") -func TunnelDefaultDirectTCPIPHandler(tunnel *httptunnel.Tunnel) func(server *gliderssh.Server, _ *gossh.ServerConn, newChan gossh.NewChannel, ctx gliderssh.Context) { - return func(server *gliderssh.Server, conn *gossh.ServerConn, newChan gossh.NewChannel, ctx gliderssh.Context) { - target := metadata.RestoreTarget(ctx) + return + 
} + if server.LocalPortForwardingCallback == nil || !server.LocalPortForwardingCallback(ctx, data.DestAddr, data.DestPort) { + newChan.Reject(gossh.Prohibited, "port forwarding is disabled") //nolint:errcheck log.WithFields(log.Fields{ - "username": target.Username, - "sshid": target.Data, - }).Info("handling direct-tcpip channel") - - type channelData struct { - DestAddr string - DestPort uint32 - OriginAddr string - OriginPort uint32 - } + "username": sess.Target.Username, + "sshid": sess.Target.Data, + "origin_port": data.OriginAddr, + "origin_addr": data.OriginPort, + "dest_port": data.DestPort, + "dest_addr": data.DestAddr, + }).Info("port forwarding is disabled") - data := new(channelData) - if err := gossh.Unmarshal(newChan.ExtraData(), data); err != nil { - newChan.Reject(gossh.ConnectionFailed, "faild to parse forward data: "+err.Error()) //nolint:errcheck - log.WithError(err).WithFields(log.Fields{ - "username": target.Username, - "sshid": target.Data, - "origin_port": data.OriginAddr, - "origin_addr": data.OriginPort, - "dest_port": data.DestPort, - "dest_addr": data.DestAddr, - }).Error("failed to parse forward data") + return + } - return - } + dest := net.JoinHostPort(data.DestAddr, strconv.FormatInt(int64(data.DestPort), 10)) - if server.LocalPortForwardingCallback == nil || !server.LocalPortForwardingCallback(ctx, data.DestAddr, data.DestPort) { - newChan.Reject(gossh.Prohibited, "port forwarding is disabled") //nolint:errcheck - log.WithFields(log.Fields{ - "username": target.Username, - "sshid": target.Data, - "origin_port": data.OriginAddr, - "origin_addr": data.OriginPort, - "dest_port": data.DestPort, - "dest_addr": data.DestAddr, - }).Info("port forwarding is disabled") + // NOTE: Certain SSH connections may not necessitate a dedicated handler, such as an SSH handler. + // In such instances, a new connection to the agent is generated and saved in the metadata for + // subsequent use. 
+ // An illustrative scenario is when the SSH connection is initiated with the "-N" flag. + connection := sess.Agent.Client - return - } + agent, err := connection.Dial("tcp", dest) + if err != nil { + newChan.Reject(gossh.ConnectionFailed, "failed dialing the agent to host and port: "+err.Error()) //nolint:errcheck + log.WithError(err).WithFields(log.Fields{ + "username": sess.Target.Username, + "sshid": sess.Target.Data, + "origin_port": data.OriginAddr, + "origin_addr": data.OriginPort, + "dest_port": data.DestPort, + "dest_addr": data.DestAddr, + }).Error("failed dialing the agent to host and port") - dest := net.JoinHostPort(data.DestAddr, strconv.FormatInt(int64(data.DestPort), 10)) - config, err := session.NewClientConfiguration(ctx) - if err != nil { - newChan.Reject(gossh.ConnectionFailed, "error creating client configuration: "+err.Error()) //nolint:errcheck - log.WithError(err).WithFields(log.Fields{ - "username": target.Username, - "sshid": target.Data, - "origin_port": data.OriginAddr, - "origin_addr": data.OriginPort, - "dest_port": data.DestPort, - "dest_addr": data.DestAddr, - }).Error("error creating client configuration") + return + } - return - } + defer agent.Close() - // NOTE: Certain SSH connections may not necessitate a dedicated handler, such as an SSH handler. - // In such instances, a new connection to the agent is generated and saved in the metadata for - // subsequent use. - // An illustrative scenario is when the SSH connection is initiated with the "-N" flag. 
- connection := metadata.RestoreAgentConn(ctx) - if connection == nil { - sess, err := session.NewSessionWithoutClient(ctx, tunnel) - if err != nil { - newChan.Reject(gossh.ConnectionFailed, "failed to create session") //nolint:errcheck - log.WithError(err).WithFields(log.Fields{ - "username": target.Username, - "sshid": target.Data, - "origin_port": data.OriginAddr, - "origin_addr": data.OriginPort, - "dest_port": data.DestPort, - "dest_addr": data.DestAddr, - }).Error("failed to create session") - - return - } - - conn, _, err := sess.NewClientConnWithDeadline(config) - if err != nil { - newChan.Reject(gossh.ConnectionFailed, "failed creating client connection: "+err.Error()) //nolint:errcheck - log.WithError(err).WithFields(log.Fields{ - "username": target.Username, - "sshid": target.Data, - "origin_port": data.OriginAddr, - "origin_addr": data.OriginPort, - "dest_port": data.DestPort, - "dest_addr": data.DestAddr, - }).Error("failed creating agent connection") - - return - } - - metadata.MaybeStoreAgentConn(ctx, conn) - connection = conn - } + client, reqs, err := newChan.Accept() + if err != nil { + newChan.Reject(gossh.ConnectionFailed, "failed accepting the channel: "+err.Error()) //nolint:errcheck + log.WithError(err).WithFields(log.Fields{ + "username": sess.Target.Username, + "sshid": sess.Target.Data, + "origin_port": data.OriginAddr, + "origin_addr": data.OriginPort, + "dest_port": data.DestPort, + "dest_addr": data.DestAddr, + }).Error("failed accepting the channel") - agent, err := connection.Dial("tcp", dest) - if err != nil { - newChan.Reject(gossh.ConnectionFailed, "failed dialing the agent to host and port: "+err.Error()) //nolint:errcheck - log.WithError(err).WithFields(log.Fields{ - "username": target.Username, - "sshid": target.Data, - "origin_port": data.OriginAddr, - "origin_addr": data.OriginPort, - "dest_port": data.DestPort, - "dest_addr": data.DestAddr, - }).Error("failed dialing the agent to host and port") + return + } - return - } + 
defer client.Close() + + go gossh.DiscardRequests(reqs) + + log.WithFields(log.Fields{ + "username": sess.Target.Username, + "sshid": sess.Target.Data, + "origin_port": data.OriginAddr, + "origin_addr": data.OriginPort, + "dest_port": data.DestPort, + "dest_addr": data.DestAddr, + }).Info("piping data between client and agent") + + wg := new(sync.WaitGroup) - channel, reqs, err := newChan.Accept() - if err != nil { - newChan.Reject(gossh.ConnectionFailed, "failed accepting the channel: "+err.Error()) //nolint:errcheck - log.WithError(err).WithFields(log.Fields{ - "username": target.Username, - "sshid": target.Data, - "origin_port": data.OriginAddr, - "origin_addr": data.OriginPort, - "dest_port": data.DestPort, - "dest_addr": data.DestAddr, - }).Error("failed accepting the channel") + // TODO: control the running state of these goroutines. + wg.Add(1) + go func() { + defer wg.Done() + + log.WithFields(log.Fields{ + "username": sess.Target.Username, + "sshid": sess.Target.Data, + "origin_port": data.OriginAddr, + "origin_addr": data.OriginPort, + "dest_port": data.DestPort, + "dest_addr": data.DestAddr, + }).Trace("copying data from client to agent") + + if _, err := io.Copy(client, agent); err != nil && err != io.EOF { + log.WithError(err).Error("failed to copy data from agent to client") return } + }() - go gossh.DiscardRequests(reqs) + wg.Add(1) + go func() { + defer wg.Done() log.WithFields(log.Fields{ - "username": target.Username, - "sshid": target.Data, + "username": sess.Target.Username, + "sshid": sess.Target.Data, "origin_port": data.OriginAddr, "origin_addr": data.OriginPort, "dest_port": data.DestPort, "dest_addr": data.DestAddr, - }).Info("piping data between client and agent") - - // TODO: control the running state of these goroutines. 
- go func() { - log.WithFields(log.Fields{ - "username": target.Username, - "sshid": target.Data, - "origin_port": data.OriginAddr, - "origin_addr": data.OriginPort, - "dest_port": data.DestPort, - "dest_addr": data.DestAddr, - }).Debug("copying data from client to agent") - - defer channel.Close() - io.Copy(channel, agent) //nolint:errcheck - }() - go func() { - log.WithFields(log.Fields{ - "username": target.Username, - "sshid": target.Data, - "origin_port": data.OriginAddr, - "origin_addr": data.OriginPort, - "dest_port": data.DestPort, - "dest_addr": data.DestAddr, - }).Debug("copying data from agent to client") - - defer channel.Close() - io.Copy(agent, channel) //nolint:errcheck - }() - } + }).Trace("copying data from agent to client") + + if _, err := io.Copy(agent, client); err != nil && err != io.EOF { + log.WithError(err).Error("failed to copy data from client to agent") + + return + } + }() + + wg.Wait() + + log.WithFields(log.Fields{ + "username": sess.Target.Username, + "sshid": sess.Target.Data, + "origin_port": data.OriginAddr, + "origin_addr": data.OriginPort, + "dest_port": data.DestPort, + "dest_addr": data.DestAddr, + }).Trace("handling direct-tcpip finished") } diff --git a/ssh/server/channels/utils.go b/ssh/server/channels/utils.go new file mode 100644 index 00000000000..5128d8b9f3e --- /dev/null +++ b/ssh/server/channels/utils.go @@ -0,0 +1,154 @@ +package channels + +import ( + "io" + "sync" + + "github.com/Masterminds/semver" + "github.com/shellhub-io/shellhub/pkg/envs" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/shellhub-io/shellhub/ssh/session" + log "github.com/sirupsen/logrus" + gossh "golang.org/x/crypto/ssh" +) + +type Recorder struct { + // session is the session between Agent and Client. + session *session.Session + // seat is the current identifier of session's. 
+	seat int
+}
+
+func NewRecorder(session *session.Session, seat int) (io.Writer, error) {
+	return &Recorder{
+		session: session,
+		seat:    seat,
+	}, nil
+}
+
+// PtyOutputEventType is the event's type for an output.
+const PtyOutputEventType = "pty-output"
+
+func (c *Recorder) Write(output []byte) (int, error) {
+	// NOTE: Writes the event into the event stream to be processed and sent to the target endpoint.
+	c.session.Event(PtyOutputEventType, &models.SSHPtyOutput{
+		Output: string(output),
+	}, c.seat)
+
+	return len(output), nil
+}
+
+// pipe function pipes data between client and agent, and vice versa, recording each frame when the ShellHub instance is
+// Cloud or Enterprise.
+func pipe(sess *session.Session, client gossh.Channel, agent gossh.Channel, seat int, done chan bool) {
+	defer log.
+		WithFields(log.Fields{"session": sess.UID, "sshid": sess.SSHID}).
+		Trace("data pipe between client and agent has done")
+
+	wg := new(sync.WaitGroup)
+	wg.Add(2)
+
+	c := io.MultiReader(client, client.Stderr())
+	a := io.MultiReader(agent, agent.Stderr())
+
+	go func() {
+		defer wg.Done()
+		defer client.CloseWrite()
+		defer func() {
+			done <- true
+		}()
+
+		writers := []io.Writer{client}
+		if envs.IsEnterprise() || envs.IsCloud() {
+			recorder, err := NewRecorder(sess, seat)
+			if err != nil {
+				log.WithError(err).
+					WithFields(log.Fields{"session": sess.UID, "sshid": sess.SSHID}).
+					Warning("failed to connect to session record endpoint")
+			}
+
+			if err := sess.Recorded(seat); err != nil {
+				log.WithError(err).
+					WithFields(log.Fields{"session": sess.UID, "sshid": sess.SSHID}).
+					Warning("failed to set the session as recorded")
+
+				// NOTE: When we fail to update the session status to record, we don't send session's chunks to storage.
+				recorder = nil
+			}
+
+			if recorder != nil {
+				writers = append(writers, recorder)
+			}
+		}
+
+		multi := io.MultiWriter(writers...)
+		if _, err := io.Copy(multi, a); err != nil && err != io.EOF {
+			log.WithError(err).Error("failed on copying data from agent to client")
+		}
+
+		log.Trace("agent channel data copy done")
+	}()
+
+	go func() {
+		defer wg.Done()
+		defer func() {
+			// NOTE: When request is [ExecRequestType] and agent's version is less than v0.9.2, we should close the agent
+			// connection to avoid it being hung after the data flow ends.
+			if ver, err := semver.NewVersion(sess.Device.Info.Version); ver != nil && err == nil {
+				// NOTE: We indicate here v0.9.3, but it is not included due to the assertion `less than`.
+				if ver.LessThan(semver.MustParse("v0.9.3")) && sess.Type == ExecRequestType {
+					agent.Close()
+				} else {
+					agent.CloseWrite() //nolint:errcheck
+				}
+			} else {
+				agent.CloseWrite() //nolint:errcheck
+			}
+		}()
+
+		if _, err := io.Copy(agent, c); err != nil && err != io.EOF {
+			log.WithError(err).Error("failed on copying data from client to agent")
+		}
+
+		log.Trace("client channel data copy done")
+	}()
+
+	wg.Wait()
+}
+
+// hose is a generic version of [pipe] function without the record capability.
+func hose(sess *session.Session, agent gossh.Channel, client gossh.Channel) {
+	defer log.
+		WithFields(log.Fields{"session": sess.UID, "sshid": sess.SSHID}).
+		Trace("data pipe between client and agent has done")
+
+	wg := new(sync.WaitGroup)
+	wg.Add(2)
+
+	a := io.MultiReader(agent, agent.Stderr())
+	c := io.MultiReader(client, client.Stderr())
+
+	go func() {
+		defer wg.Done()
+		defer agent.CloseWrite() //nolint:errcheck
+
+		if _, err := io.Copy(agent, c); err != nil && err != io.EOF {
+			log.WithError(err).Error("failed on copying data from client to agent")
+		}
+
+		log.Trace("agent channel data copy done")
+	}()
+
+	go func() {
+		defer wg.Done()
+		defer client.CloseWrite() //nolint:errcheck
+
+		if _, err := io.Copy(client, a); err != nil && err != io.EOF {
+			log.WithError(err).Error("failed on copying data from agent to client")
+		}
+
+		log.Trace("client channel data copy done")
+	}()
+
+	wg.Wait()
+}
diff --git a/ssh/server/handler/doc.go b/ssh/server/handler/doc.go
deleted file mode 100644
index 667627cedd1..00000000000
--- a/ssh/server/handler/doc.go
+++ /dev/null
@@ -1,2 +0,0 @@
-// Package handler handlers a ShellHub client`s connection to Connect server.
-package handler
diff --git a/ssh/server/handler/sftp.go b/ssh/server/handler/sftp.go
deleted file mode 100644
index b1beae81cfe..00000000000
--- a/ssh/server/handler/sftp.go
+++ /dev/null
@@ -1,119 +0,0 @@
-package handler
-
-import (
-	"context"
-	"fmt"
-
-	gliderssh "github.com/gliderlabs/ssh"
-	"github.com/shellhub-io/shellhub/pkg/api/internalclient"
-	"github.com/shellhub-io/shellhub/pkg/httptunnel"
-	"github.com/shellhub-io/shellhub/ssh/pkg/flow"
-	"github.com/shellhub-io/shellhub/ssh/pkg/metadata"
-	"github.com/shellhub-io/shellhub/ssh/session"
-	log "github.com/sirupsen/logrus"
-	gossh "golang.org/x/crypto/ssh"
-)
-
-const SFTPSubsystem = "sftp"
-
-// SFTPSubsystemHandler handlers a SFTP connection.
-func SFTPSubsystemHandler(tunnel *httptunnel.Tunnel) gliderssh.SubsystemHandler { - return func(client gliderssh.Session) { - log.WithFields(log.Fields{"sshid": client.User()}).Info("SFTP connection started") - defer log.WithFields(log.Fields{"sshid": client.User()}).Info("SFTP connection closed") - - defer client.Close() - - ctx := client.Context() - api := metadata.RestoreAPI(ctx) - - sess, err := session.NewSession(client, tunnel) - if err != nil { - log.WithError(err). - WithFields(log.Fields{"sshid": client.User()}). - Error("Error when trying to create a new session") - - client.Write([]byte(fmt.Sprintf("%s\n", err.Error()))) // nolint: errcheck - - return - } - - defer sess.Finish() // nolint:errcheck - - config, err := session.NewClientConfiguration(ctx) - if err != nil { - writeError(sess, "Error while creating client configuration", err, ErrConfiguration) - - return - } - - if err = connectSFTP(ctx, client, sess, api, config); err != nil { - writeError(sess, "Error during SSH connection", err, err) - - return - } - } -} - -func connectSFTP(ctx context.Context, client gliderssh.Session, sess *session.Session, api internalclient.Client, config *gossh.ClientConfig) error { - connection, reqs, err := sess.NewClientConnWithDeadline(config) - if err != nil { - log.WithError(err). - WithFields(log.Fields{"session": sess.UID, "sshid": client.User()}). - Error("Error when trying to authenticate the connection") - - return ErrAuthentication - } - - agent, err := connection.NewSession() - if err != nil { - log.WithError(err). - WithFields(log.Fields{"session": sess.UID, "sshid": client.User()}). - Error("Error when trying to start the agent's session") - - return ErrSession - } - - defer agent.Close() - - log.WithFields(log.Fields{"session": sess.UID, "sshid": client.User()}). - Debug("requesting a subsystem for session") - if err = agent.RequestSubsystem(SFTPSubsystem); err != nil { - log.WithError(err). 
- WithFields(log.Fields{"session": sess.UID, "sshid": client.User()}). - Error("failed to request a subsystem") - - return err - } - - go session.HandleRequests(ctx, reqs, api, ctx.Done()) - - if errs := api.SessionAsAuthenticated(sess.UID); len(errs) > 0 { - log.WithError(errs[0]). - WithFields(log.Fields{"session": sess.UID, "sshid": client.User()}). - Error("failed to authenticate the session") - - return errs[0] - } - - flw, err := flow.NewFlow(agent) - if err != nil { - log.WithError(err). - WithFields(log.Fields{"session": sess.UID, "sshid": client.User()}). - Error("failed to create a flow of data from agent") - - return err - } - - done := make(chan bool) - - go flw.PipeIn(client, done) - go flw.PipeOut(client, done) - go flw.PipeErr(client, done) - - <-done - <-done - <-done - - return nil -} diff --git a/ssh/server/handler/ssh.go b/ssh/server/handler/ssh.go deleted file mode 100644 index 0a48f692ffa..00000000000 --- a/ssh/server/handler/ssh.go +++ /dev/null @@ -1,481 +0,0 @@ -package handler - -import ( - "bytes" - "context" - "fmt" - "io" - - "github.com/Masterminds/semver" - gliderssh "github.com/gliderlabs/ssh" - "github.com/shellhub-io/shellhub/pkg/api/internalclient" - "github.com/shellhub-io/shellhub/pkg/envs" - "github.com/shellhub-io/shellhub/pkg/httptunnel" - "github.com/shellhub-io/shellhub/pkg/models" - "github.com/shellhub-io/shellhub/ssh/pkg/flow" - "github.com/shellhub-io/shellhub/ssh/pkg/metadata" - "github.com/shellhub-io/shellhub/ssh/session" - log "github.com/sirupsen/logrus" - gossh "golang.org/x/crypto/ssh" -) - -// Errors returned by handlers to client. 
-var ( - ErrRequestShell = fmt.Errorf("failed to open a shell in the device") - ErrRequestExec = fmt.Errorf("failed to exec the command in the device") - ErrRequestHeredoc = fmt.Errorf("failed to exec the command as heredoc in the device") - ErrRequestUnsupported = fmt.Errorf("failed to get the request type") - ErrPublicKey = fmt.Errorf("failed to get the parsed public key") - ErrPrivateKey = fmt.Errorf("failed to get a key data from the server") - ErrSigner = fmt.Errorf("failed to create a signer from the private key") - ErrConnect = fmt.Errorf("failed to connect to device") - ErrSession = fmt.Errorf("failed to create a session between the server to the agent") - ErrGetAuth = fmt.Errorf("failed to get auth data from key") - ErrWebData = fmt.Errorf("failed to get the data to connect to device") - ErrFindDevice = fmt.Errorf("failed to find the device") - ErrFindPublicKey = fmt.Errorf("failed to get the public key from the server") - ErrEvaluatePublicKey = fmt.Errorf("failed to evaluate the public key in the server") - ErrForbiddenPublicKey = fmt.Errorf("failed to use the public key for this action") - ErrDataPublicKey = fmt.Errorf("failed to parse the public key data") - ErrSignaturePublicKey = fmt.Errorf("failed to decode the public key signature") - ErrVerifyPublicKey = fmt.Errorf("failed to verify the public key") - ErrSignerPublicKey = fmt.Errorf("failed to signer the public key") - ErrDialSSH = fmt.Errorf("failed to dial to connect to server") - ErrEnvIPAddress = fmt.Errorf("failed to set the env virable of ip address from client") - ErrEnvWS = fmt.Errorf("failed to set the env virable of web socket from client") - ErrPipe = fmt.Errorf("failed to pipe client data to agent") - ErrPty = fmt.Errorf("failed to request the pty to agent") - ErrShell = fmt.Errorf("failed to get the shell to agent") - ErrTarget = fmt.Errorf("failed to get client target") - ErrAuthentication = fmt.Errorf("failed to authenticate to device") - ErrEnvs = fmt.Errorf("failed to parse server 
envs") - ErrConfiguration = fmt.Errorf("failed to create communication configuration") - ErrInvalidVersion = fmt.Errorf("failed to parse device version") - ErrUnsuportedPublicKeyAuth = fmt.Errorf("connections using public keys are not permitted when the agent version is 0.5.x or earlier") -) - -type ConfigOptions struct { - RecordURL string `env:"RECORD_URL"` - - // Allows SSH to connect with an agent via a public key when the agent version is less than 0.6.0. - // Agents 0.5.x or earlier do not validate the public key request and may panic. - // Please refer to: https://github.com/shellhub-io/shellhub/issues/3453 - AllowPublickeyAccessBelow060 bool `env:"ALLOW_PUBLIC_KEY_ACCESS_BELLOW_0_6_0,default=false"` -} - -func parseConfig() (*ConfigOptions, error) { - return envs.Parse[ConfigOptions]() -} - -// SSHHandler handlers a "normal" SSH connection. -func SSHHandler(tunnel *httptunnel.Tunnel) gliderssh.Handler { - return func(client gliderssh.Session) { - log.WithFields(log.Fields{"sshid": client.User()}).Info("SSH connection started") - defer log.WithFields(log.Fields{"sshid": client.User()}).Info("SSH connection closed") - - defer client.Close() - - sess, err := session.NewSession(client, tunnel) - if err != nil { - log.WithError(err). - WithFields(log.Fields{"sshid": client.User()}). - Error("Error when trying to create a new session") - - client.Write([]byte(fmt.Sprintf("%s\n", err.Error()))) // nolint: errcheck - - return - } - defer sess.Finish() // nolint: errcheck - - opts, err := parseConfig() - if err != nil { - writeError(sess, "Error while parsing envs", err, ErrEnvs) - - return - } - - ctx := client.Context() - - // When the Shellhub instance dennies connections with - // potentially broken agents, we need to evaluate the connection's context - // and identify potential bugs. The server must reject the connection - // if there's a possibility of issues; otherwise, proceeds. 
- if err := evaluateContext(ctx, opts); err != nil { - writeError(sess, "Error while evaluating context", err, err) - - return - } - - config, err := session.NewClientConfiguration(ctx) - if err != nil { - writeError(sess, "Error while creating client configuration", err, ErrConfiguration) - - return - } - - api := metadata.RestoreAPI(ctx) - err = connectSSH(ctx, client, sess, config, api, *opts) - if err != nil { - writeError(sess, "Error during SSH connection", err, err) - - return - } - } -} - -func connectSSH(ctx context.Context, client gliderssh.Session, sess *session.Session, config *gossh.ClientConfig, api internalclient.Client, opts ConfigOptions) error { - connection, reqs, err := sess.NewClientConnWithDeadline(config) - if err != nil { - log.WithError(err). - WithFields(log.Fields{"session": sess.UID, "sshid": client.User()}). - Error("Error when to authenticate the connection") - - return ErrAuthentication - } - defer connection.Close() - - metadata.MaybeStoreAgentConn(ctx.(gliderssh.Context), connection) - - agent, err := connection.NewSession() - if err != nil { - log.WithError(err). - WithFields(log.Fields{"session": sess.UID, "sshid": client.User()}). - Error("Error when trying to start the agent's session") - - return ErrSession - } - defer agent.Close() - - go session.HandleRequests(ctx, reqs, api, ctx.Done()) - - metadata.MaybeStoreEstablished(ctx.(gliderssh.Context), true) - - switch sess.GetType() { - case session.Term, session.Web: - if err := shell(api, sess, agent, client, opts); err != nil { - return ErrRequestShell - } - case session.HereDoc: - err := heredoc(api, sess.UID, agent, client) - if err != nil { - return ErrRequestHeredoc - } - case session.Exec, session.SCP: - device := metadata.RestoreDevice(ctx.(gliderssh.Context)) - - if err := exec(api, sess, device, agent, client); err != nil { - return ErrRequestExec - } - default: - if err := client.Exit(255); err != nil { - log.WithError(err). 
- WithFields(log.Fields{"session": sess.UID, "sshid": client.User()}). - Warning("exiting client returned an error") - } - - return ErrRequestUnsupported - } - - return nil -} - -// exitCodeFromError gets the exit code from the client. -// -// If error is nil, the exit code is zero, meaning that there isn't error. If none exit code is returned, it returns 255. -func exitCodeFromError(err error) int { - if err == nil { - return 0 - } - - fault, ok := err.(*gossh.ExitError) - if !ok { - return 255 - } - - return fault.ExitStatus() -} - -// isUnknownError checks if an error is unknown exit error -// An error is considered known if it is either *gossh.ExitMissingError or *gossh.ExitError. -func isUnknownExitError(err error) bool { - switch err.(type) { - case *gossh.ExitMissingError, *gossh.ExitError: - return false - } - - return err != nil -} - -func resizeWindow(uid string, agent *gossh.Session, winCh <-chan gliderssh.Window) { - for win := range winCh { - if err := agent.WindowChange(win.Height, win.Width); err != nil { - log.WithError(err). - WithFields(log.Fields{"client": uid}). - Warning("failed to send WindowChange") - } - } -} - -// shell handles an interactive terminal session. -func shell(api internalclient.Client, sess *session.Session, agent *gossh.Session, client gliderssh.Session, opts ConfigOptions) error { - uid := sess.UID - - if errs := api.SessionAsAuthenticated(uid); len(errs) > 0 { - log.WithError(errs[0]). - WithFields(log.Fields{"session": sess.UID, "sshid": client.User()}). - Error("failed to authenticate the session") - - return errs[0] - } - - pty, winCh, _ := client.Pty() - - log.WithFields(log.Fields{"session": sess.UID, "sshid": client.User()}). - Debug("requesting a PTY for session") - - if err := agent.RequestPty(pty.Term, pty.Window.Height, pty.Window.Width, gossh.TerminalModes{}); err != nil { - log.WithError(err). - WithFields(log.Fields{"session": sess.UID, "sshid": client.User()}). 
- Error("failed to request a PTY") - - return err - } - - go resizeWindow(uid, agent, winCh) - - flw, err := flow.NewFlow(agent) - if err != nil { - log.WithError(err). - WithFields(log.Fields{"session": sess.UID, "sshid": client.User()}). - Error("failed to create a flow of data from agent") - - return err - } - - done := make(chan bool) - - go flw.PipeIn(client, done) - - go func() { - buffer := make([]byte, 1024) - for { - read, err := flw.Stdout.Read(buffer) - // The occurrence of io.EOF is expected when the connection ends. - // This indicates that we have reached the end of the input stream, and we need - // to break out of the loop to handle the termination of the connection - if err == io.EOF { - break - } - // Unlike io.EOF, when 'err' is simply not nil, it signifies an unexpected error, - // and we need to log to handle it appropriately. - if err != nil { - log.WithError(err). - WithFields(log.Fields{"session": sess.UID, "sshid": client.User()}). - Warning("failed to read from stdout in pty client") - - break - } - - if _, err = io.Copy(client, bytes.NewReader(buffer[:read])); err != nil && err != io.EOF { - log.WithError(err). - WithFields(log.Fields{"session": sess.UID, "sshid": client.User()}). - Warning("failed to copy from stdout in pty client") - - break - } - - if envs.IsEnterprise() || envs.IsCloud() { - message := string(buffer[:read]) - - api.RecordSession(&models.SessionRecorded{ - UID: uid, - Namespace: sess.Lookup["domain"], - Message: message, - Width: pty.Window.Height, - Height: pty.Window.Width, - }, opts.RecordURL) - } - } - }() - - go flw.PipeErr(client.Stderr(), nil) - - go func() { - // When agent stop to send data, it means that the command has finished and the process should be closed. - <-done - - agent.Close() - }() - - if err := agent.Shell(); err != nil { - log.WithError(err). - WithFields(log.Fields{"session": sess.UID, "sshid": client.User()}). 
- Error("failed to start a new shell") - - return err - } - - if err := agent.Wait(); isUnknownExitError(err) { - log.WithError(err). - WithFields(log.Fields{"session": sess.UID, "sshid": client.User()}). - Warning("client remote shell returned an error") - } - - // We can safely ignore EOF errors on exit - if err := client.Exit(0); err != nil && err != io.EOF { - log.WithError(err). - WithFields(log.Fields{"session": sess.UID, "sshid": client.User()}). - Warning("exiting client returned an error") - } - - return nil -} - -// heredoc handles a heredoc session. -func heredoc(api internalclient.Client, uid string, agent *gossh.Session, client gliderssh.Session) error { - if errs := api.SessionAsAuthenticated(uid); len(errs) > 0 { - log.WithError(errs[0]). - WithFields(log.Fields{"session": uid, "sshid": client.User()}). - Error("failed to authenticate the session") - - return errs[0] - } - - flw, err := flow.NewFlow(agent) - if err != nil { - log.WithError(err). - WithFields(log.Fields{"session": uid, "sshid": client.User()}). - Error("failed to create a flow of data from agent") - - return err - } - - done := make(chan bool) - - go flw.PipeIn(client, nil) - go flw.PipeOut(client, done) - go flw.PipeErr(client.Stderr(), nil) - - go func() { - // When agent stop to send data, it means that the command has finished and the process should be closed. - <-done - - agent.Close() - }() - - if err := agent.Shell(); err != nil { - log.WithError(err). - WithFields(log.Fields{"session": uid, "sshid": client.User()}). - Error("failed to start a new shell") - - return err - } - - if err := agent.Wait(); isUnknownExitError(err) { - log.WithError(err). - WithFields(log.Fields{"session": uid, "sshid": client.User()}). - Warning("command on agent returned an error") - } - - if err := client.Exit(exitCodeFromError(err)); err != nil { - log.WithError(err). - WithFields(log.Fields{"session": uid, "sshid": client.User()}). 
- Warning("exiting client returned an error") - } - - return nil -} - -// exec handles a non-interactive session. -func exec(api internalclient.Client, sess *session.Session, device *models.Device, agent *gossh.Session, client gliderssh.Session) error { - uid := sess.UID - - if errs := api.SessionAsAuthenticated(uid); len(errs) > 0 { - log.WithError(errs[0]). - WithFields(log.Fields{"session": sess.UID, "sshid": client.User()}). - Error("failed to authenticate the session") - - return errs[0] - } - - flw, err := flow.NewFlow(agent) - if err != nil { - log.WithError(err). - WithFields(log.Fields{"session": sess.UID, "sshid": client.User()}). - Error("failed to create a flow of data from agent to agent") - - return err - } - - // request a new pty when isPty is true - pty, winCh, isPty := client.Pty() - if isPty { - log.WithFields(log.Fields{"session": sess.UID, "sshid": client.User()}). - Debug("requesting a PTY for session") - - if err := agent.RequestPty(pty.Term, pty.Window.Height, pty.Window.Width, gossh.TerminalModes{}); err != nil { - log.WithError(err). - WithFields(log.Fields{"session": sess.UID, "sshid": client.User()}). - Error("failed to request a PTY") - - return err - } - } - - if isPty { - go resizeWindow(uid, agent, winCh) - } - - waitPipeIn := make(chan bool) - waitPipeOut := make(chan bool) - - go flw.PipeIn(client, waitPipeIn) - go flw.PipeOut(client, waitPipeOut) - go flw.PipeErr(client.Stderr(), nil) - - if err := agent.Start(client.RawCommand()); err != nil { - log.WithError(err). - WithFields(log.Fields{"session": sess.UID, "sshid": client.User(), "command": client.RawCommand()}). - Error("failed to start a command on agent") - - return err - } - - if device.Info.Version != "latest" { - ver, err := semver.NewVersion(device.Info.Version) - if err != nil { - log.WithError(err). - WithFields(log.Fields{"session": sess.UID, "sshid": client.User()}). 
- Error("failed to parse device version") - - return err - } - - // version less 0.9.3 does not support the exec command, what will make some commands to hang forever. - if ver.LessThan(semver.MustParse("0.9.3")) { - go func() { - // When agent stop to send data, it means that the command has finished and the process should be closed. - <-waitPipeIn - agent.Close() - }() - } - } - - // When agent stop to send data, it means that the command has finished and the process should be closed. - <-waitPipeOut - agent.Close() - - if err = agent.Wait(); isUnknownExitError(err) { - log.WithError(err). - WithFields(log.Fields{"session": sess.UID, "sshid": client.User(), "command": client.RawCommand()}). - Warning("command on agent returned an error") - } - - if err := client.Exit(exitCodeFromError(err)); err != nil { - log.WithError(err). - WithFields(log.Fields{"session": sess.UID, "sshid": client.User()}). - Warning("exiting client returned an error") - } - - return nil -} diff --git a/ssh/server/handler/util.go b/ssh/server/handler/util.go deleted file mode 100644 index 15eae2aacb3..00000000000 --- a/ssh/server/handler/util.go +++ /dev/null @@ -1,67 +0,0 @@ -package handler - -import ( - "fmt" - "io" - - "github.com/Masterminds/semver" - gliderssh "github.com/gliderlabs/ssh" - "github.com/shellhub-io/shellhub/ssh/pkg/metadata" - "github.com/shellhub-io/shellhub/ssh/session" - log "github.com/sirupsen/logrus" -) - -// sendAndInformError sends the external error to client and log the internal one to server. -func sendAndInformError(client io.Writer, internal, external error) { - log.Error(internal.Error()) - - client.Write([]byte(fmt.Sprintf("%s\n", external.Error()))) // nolint: errcheck -} - -// writeError logs an internal error and writes an external error to the client's session. -func writeError(sess *session.Session, msg string, iErr, eError error) { - log.WithError(iErr). - WithFields(log.Fields{"session": sess.UID, "sshid": sess.Client.User()}). 
- Error(msg) - - sess.Client.Write([]byte(fmt.Sprintf("%s\n", eError.Error()))) // nolint: errcheck -} - -// evaluateContext evaluates the given context and returns an error if there's anything -// that may cause issues during the connection. -func evaluateContext(ctx gliderssh.Context, opts *ConfigOptions) error { - if !opts.AllowPublickeyAccessBelow060 { - return checkAgentVersionForPublicKey(ctx) - } - - return nil -} - -// checkAgentVersionForPublicKey checks if the agent's version supports public key authentication. -// -// Versions earlier than 0.6.0 do not validate the user when receiving a public key -// authentication request. This implies that requests with invalid users are -// treated as "authenticated" because the connection does not raise any error. -// Moreover, the agent panics after the connection ends. To avoid this, connections -// with public key are not permitted when agent version is 0.5.x or earlier -func checkAgentVersionForPublicKey(ctx gliderssh.Context) error { - if metadata.RestoreAuthenticationMethod(ctx) != metadata.PublicKeyAuthenticationMethod { - return nil - } - - version := metadata.RestoreDevice(ctx).Info.Version - if version == "latest" { - return nil - } - - semverVersion, err := semver.NewVersion(version) - if err != nil { - return ErrInvalidVersion - } - - if semverVersion.LessThan(semver.MustParse("0.6.0")) { - return ErrUnsuportedPublicKeyAuth - } - - return nil -} diff --git a/ssh/server/handler/util_test.go b/ssh/server/handler/util_test.go deleted file mode 100644 index 557c0302249..00000000000 --- a/ssh/server/handler/util_test.go +++ /dev/null @@ -1,82 +0,0 @@ -package handler - -import ( - "testing" - - gliderssh "github.com/gliderlabs/ssh" - "github.com/shellhub-io/shellhub/pkg/models" - "github.com/shellhub-io/shellhub/ssh/pkg/metadata" - metadataMocks "github.com/shellhub-io/shellhub/ssh/pkg/metadata/mocks" - "github.com/stretchr/testify/assert" -) - -func TestCheckAgentVersionForPublicKey(t *testing.T) { - cases 
:= []struct { - description string - requiredMocks func(gliderssh.Context) - expected error - }{ - { - description: "succeeds when authentication method is passwod", - requiredMocks: func(ctx gliderssh.Context) { - metadataMock := new(metadataMocks.Metadata) - metadata.SetBackend(metadataMock) - - metadataMock.On("RestoreAuthenticationMethod", ctx). - Return(metadata.PasswordAuthenticationMethod). - Once() - }, - expected: nil, - }, - { - description: "succeeds when device's version is 'latest'", - requiredMocks: func(ctx gliderssh.Context) { - metadataMock := new(metadataMocks.Metadata) - metadata.SetBackend(metadataMock) - - metadataMock.On("RestoreAuthenticationMethod", ctx). - Return(metadata.PublicKeyAuthenticationMethod). - Once() - - metadataMock.On("RestoreDevice", ctx). - Return(&models.Device{ - Info: &models.DeviceInfo{ - Version: "latest", - }, - }). - Once() - }, - expected: nil, - }, - { - description: "fails when device's version is 0.5.x or earlier and authentication method is PUBLIC KEY", - requiredMocks: func(ctx gliderssh.Context) { - metadataMock := new(metadataMocks.Metadata) - metadata.SetBackend(metadataMock) - - metadataMock.On("RestoreAuthenticationMethod", ctx). - Return(metadata.PublicKeyAuthenticationMethod). - Once() - - metadataMock.On("RestoreDevice", ctx). - Return(&models.Device{ - Info: &models.DeviceInfo{ - Version: "0.5.2", - }, - }). 
- Once() - }, - expected: ErrUnsuportedPublicKeyAuth, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - ctx := new(gliderssh.Context) - tc.requiredMocks(*ctx) - - err := checkAgentVersionForPublicKey(*ctx) - assert.Equal(t, tc.expected, err) - }) - } -} diff --git a/ssh/server/handler/web.go b/ssh/server/handler/web.go deleted file mode 100644 index 2bdebcec932..00000000000 --- a/ssh/server/handler/web.go +++ /dev/null @@ -1,324 +0,0 @@ -package handler - -import ( - "bytes" - "crypto/rsa" - "encoding/base64" - "errors" - "io" - "strconv" - "time" - "unicode/utf8" - - "github.com/shellhub-io/shellhub/pkg/api/internalclient" - "github.com/shellhub-io/shellhub/pkg/clock" - "github.com/shellhub-io/shellhub/ssh/pkg/flow" - "github.com/shellhub-io/shellhub/ssh/pkg/magickey" - "github.com/shellhub-io/shellhub/ssh/pkg/target" - "github.com/shellhub-io/shellhub/ssh/web" - log "github.com/sirupsen/logrus" - "golang.org/x/crypto/ssh" - "golang.org/x/net/websocket" -) - -// WebData contains the data required by web terminal connection. -type WebData struct { - // User is the device's user. - User string - // Password is the user's device password. - // when Password is set, Fingerprint must not be set. - Password string - // Fingerprint is the public key fingerprint. - // when Fingerprint is set, Password must not be set. - Fingerprint string - Signature string - // Columns is the width size of pty. - Columns int - // Rows is the height size of pty. - Rows int -} - -// NewWebData create a new WebData. -// WebData contains the data required by web termianl connection. 
-func NewWebData(socket *websocket.Conn, input *web.Session) (*WebData, error) { - get := func(socket *websocket.Conn, key string) (string, bool) { - value := socket.Request().URL.Query().Get(key) - - return value, value != "" - } - - toInt := func(text string, ok bool) (int, error) { - if !ok { - return 0, errors.New("failed to get the value to convert to int") - } - - integer, err := strconv.Atoi(text) - if err != nil { - log.WithError(err).Error("failed to convert the text to int") - - return 0, err - } - - return integer, nil - } - - columns, err := toInt(get(socket, "cols")) - if err != nil { - return nil, errors.New("cols field is invalid or missing") - } - - rows, err := toInt(get(socket, "rows")) - if err != nil { - return nil, errors.New("rows field is invalid or missing") - } - - target := input.Username + "@" + input.Device - - return &WebData{ - User: target, - Password: input.Password, - Fingerprint: input.Fingerprint, - Signature: input.Signature, - Columns: columns, - Rows: rows, - }, nil -} - -// isPublicKey checks if connection is using public key method. -func (c *WebData) isPublicKey() bool { // nolint: unused - return c.Fingerprint != "" && c.Signature != "" -} - -// isPassword checks if connection is using password method. -func (c *WebData) isPassword() bool { - return c.Password != "" -} - -// GetAuth gets the authentication methods from connection. -func (c *WebData) GetAuth(magicKey *rsa.PrivateKey) ([]ssh.AuthMethod, error) { - if c.isPassword() { - return []ssh.AuthMethod{ssh.Password(c.Password)}, nil - } - - tag, err := target.NewTarget(c.User) - if err != nil { - return nil, ErrTarget - } - - cli := internalclient.NewClient() - - // Trys to get a device from the API. - device, err := cli.GetDevice(tag.Data) - if err != nil { - return nil, ErrFindDevice - } - - // Trys to get a public key from the API. 
- key, err := cli.GetPublicKey(c.Fingerprint, device.TenantID) - if err != nil { - return nil, ErrFindPublicKey - } - - // Trys to evaluate the public key from the API. - ok, err := cli.EvaluateKey(c.Fingerprint, device, tag.Username) - if err != nil { - return nil, ErrEvaluatePublicKey - } - - if !ok { - return nil, ErrForbiddenPublicKey - } - - pubKey, _, _, _, err := ssh.ParseAuthorizedKey(key.Data) //nolint: dogsled - if err != nil { - return nil, ErrDataPublicKey - } - - digest, err := base64.StdEncoding.DecodeString(c.Signature) - if err != nil { - return nil, ErrSignaturePublicKey - } - - if err := pubKey.Verify([]byte(tag.Username), &ssh.Signature{ //nolint: exhaustruct - Format: pubKey.Type(), - Blob: digest, - }); err != nil { - return nil, ErrVerifyPublicKey - } - - signer, err := ssh.NewSignerFromKey(magicKey) - if err != nil { - return nil, ErrSignerPublicKey - } - - return []ssh.AuthMethod{ssh.PublicKeys(signer)}, nil -} - -// WebSession is the Client's handler for connection coming from the web terminal. 
-func WebSession(socket *websocket.Conn, input *web.Session) { - log.Info("handling web client request started") - defer log.Info("handling web client request end") - - data, err := NewWebData(socket, input) - if err != nil { - sendAndInformError(socket, err, ErrWebData) - } - - auth, err := data.GetAuth(magickey.GetRerefence()) - if err != nil { - sendAndInformError(socket, err, ErrGetAuth) - - return - } - - connection, err := ssh.Dial("tcp", "localhost:2222", &ssh.ClientConfig{ //nolint: exhaustruct - User: data.User, - Auth: auth, - HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec - }) - if err != nil { - sendAndInformError(socket, err, ErrDialSSH) - - return - } - - defer connection.Close() - - agent, err := connection.NewSession() - if err != nil { - sendAndInformError(socket, err, ErrSession) - - return - } - - defer agent.Close() - - if err = agent.Setenv("IP_ADDRESS", socket.Request().Header.Get("X-Real-Ip")); err != nil { - sendAndInformError(socket, err, ErrEnvIPAddress) - - return - } - - if err = agent.Setenv("WS", "true"); err != nil { - sendAndInformError(socket, err, ErrEnvWS) - - return - } - - flw, err := flow.NewFlow(agent) - if err != nil { - sendAndInformError(socket, err, ErrPipe) - - return - } - - defer flw.Close() - - if err := agent.RequestPty("xterm", data.Rows, data.Columns, ssh.TerminalModes{ - ssh.ECHO: 1, - ssh.TTY_OP_ISPEED: 14400, - ssh.TTY_OP_OSPEED: 14400, - }); err != nil { - sendAndInformError(socket, err, ErrPty) - - return - } - - if err := agent.Shell(); err != nil { - sendAndInformError(socket, err, ErrShell) - - return - } - - done := make(chan bool) - - go flw.PipeIn(socket, done) - go redirToWs(flw.Stdout, socket) // nolint:errcheck - go flw.PipeErr(socket, nil) - - go func() { - <-done - - agent.Close() - }() - - conn := &wsconn{ - pinger: time.NewTicker(pingInterval), - } - - defer conn.pinger.Stop() - - go conn.keepAlive(socket) - - if err := agent.Wait(); err != nil { - log.WithError(err).Warning("client 
remote command returned a error") - } -} - -func redirToWs(rd io.Reader, ws *websocket.Conn) error { - var buf [32 * 1024]byte - var start, end, buflen int - - for { - nr, err := rd.Read(buf[start:]) - if err != nil { - return err - } - - buflen = start + nr - for end = buflen - 1; end >= 0; end-- { - if utf8.RuneStart(buf[end]) { - ch, width := utf8.DecodeRune(buf[end:buflen]) - if ch != utf8.RuneError { - end += width - } - - break - } - - if buflen-end >= 6 { - end = nr - - break - } - } - - if _, err = ws.Write([]byte(string(bytes.Runes(buf[0:end])))); err != nil { - return err - } - - start = buflen - end - - if start > 0 { - // copy remaning read bytes from the end to the beginning of a buffer - // so that we will get normal bytes - for i := 0; i < start; i++ { - buf[i] = buf[end+i] - } - } - } -} - -const pingInterval = time.Second * 30 - -type wsconn struct { - pinger *time.Ticker -} - -func (w *wsconn) keepAlive(ws *websocket.Conn) { - for { - if err := ws.SetDeadline(clock.Now().Add(pingInterval * 2)); err != nil { - return - } - - if fw, err := ws.NewFrameWriter(websocket.PingFrame); err != nil { - return - } else if _, err = fw.Write([]byte{}); err != nil { - return - } - - if _, running := <-w.pinger.C; !running { - return - } - } -} diff --git a/ssh/server/messages/access_denied.txt b/ssh/server/messages/access_denied.txt new file mode 100644 index 00000000000..ec406ecadbd --- /dev/null +++ b/ssh/server/messages/access_denied.txt @@ -0,0 +1,12 @@ +Access Denied +============== + +Access to the device has been denied. + +Possible reasons: + - Firewall restrictions + - Billing issues or quota exceeded + - Policy rules or permissions + - Device access restrictions + +Please contact your administrator for assistance. 
diff --git a/ssh/server/messages/connection_failed.txt b/ssh/server/messages/connection_failed.txt new file mode 100644 index 00000000000..49a6aa41a53 --- /dev/null +++ b/ssh/server/messages/connection_failed.txt @@ -0,0 +1,11 @@ +Connection Failed +================== + +The target device is offline or cannot be reached. + +Troubleshooting steps: + - Check if the device is powered on + - Verify network connectivity + - Ensure the device is properly connected to ShellHub + +Please try again once the device is online. diff --git a/ssh/server/messages/invalid_ssh_id.txt b/ssh/server/messages/invalid_ssh_id.txt new file mode 100644 index 00000000000..cb7791bb44f --- /dev/null +++ b/ssh/server/messages/invalid_ssh_id.txt @@ -0,0 +1,12 @@ +SSHID Format Error +================== + +The SSHID format is incorrect. You need to include your username. + +Correct format: username@namespace.device@host + +Examples: + ssh john@company.workstation@example.com + ssh admin@myproject.raspberry@example.com + +Please update your SSH command and try again. 
diff --git a/ssh/server/server.go b/ssh/server/server.go index a71a4cc0b8f..535daef9b3d 100644 --- a/ssh/server/server.go +++ b/ssh/server/server.go @@ -1,68 +1,121 @@ package server import ( + _ "embed" "net" "os" + "strings" "time" gliderssh "github.com/gliderlabs/ssh" "github.com/pires/go-proxyproto" - "github.com/shellhub-io/shellhub/pkg/httptunnel" - "github.com/shellhub-io/shellhub/ssh/pkg/metadata" + "github.com/shellhub-io/shellhub/pkg/cache" + "github.com/shellhub-io/shellhub/ssh/pkg/dialer" + "github.com/shellhub-io/shellhub/ssh/pkg/target" "github.com/shellhub-io/shellhub/ssh/server/auth" "github.com/shellhub-io/shellhub/ssh/server/channels" - "github.com/shellhub-io/shellhub/ssh/server/handler" + "github.com/shellhub-io/shellhub/ssh/session" log "github.com/sirupsen/logrus" ) type Options struct { - ConnectTimeout time.Duration `env:"CONNECT_TIMEOUT,default=30s"` - RedisURI string `env:"REDIS_URI,default=redis://redis:6379"` + ConnectTimeout time.Duration + // Allows SSH to connect with an agent via a public key when the agent version is less than 0.6.0. + // Agents 0.5.x or earlier do not validate the public key request and may panic. + // Please refer to: https://github.com/shellhub-io/shellhub/issues/3453 + AllowPublickeyAccessBelow060 bool } type Server struct { sshd *gliderssh.Server opts *Options - tunnel *httptunnel.Tunnel + dialer *dialer.Dialer } -// NewServer create a new ShellHub's Connect server. 
-func NewServer(opts *Options, tunnel *httptunnel.Tunnel) *Server { +var ( + //go:embed messages/invalid_ssh_id.txt + InvalidSSHIDMessage string + + //go:embed messages/connection_failed.txt + ConnectionFailedMessage string + + //go:embed messages/access_denied.txt + AccessDeniedMessage string +) + +func NewServer(dialer *dialer.Dialer, cache cache.Cache, opts *Options) *Server { server := &Server{ // nolint: exhaustruct opts: opts, - tunnel: tunnel, + dialer: dialer, } server.sshd = &gliderssh.Server{ // nolint: exhaustruct - Addr: ":2222", - PasswordHandler: auth.PasswordHandler, - PublicKeyHandler: auth.PublicKeyHandler, - SessionRequestCallback: func(client gliderssh.Session, request string) bool { - metadata.StoreRequest(client.Context(), request) + Addr: ":2222", + ConnCallback: func(ctx gliderssh.Context, conn net.Conn) net.Conn { + ctx.SetValue("conn", conn) - target := metadata.RestoreTarget(client.Context()) - log.WithFields(log.Fields{ - "username": target.Username, - "sshid": target.Data, - "request": request, - }).Info("Session request") + return conn + }, + BannerHandler: func(ctx gliderssh.Context) string { + logger := log.WithFields( + log.Fields{ + "uid": ctx.SessionID(), + "sshid": ctx.User(), + }) - return true + logger.Info("new connection established") + + // NOTE: Replace all `\n` with `\r\n` to be compliant with the RFC 4252, section 5.4 + // (https://datatracker.ietf.org/doc/html/rfc4252#section-5.4) that states: "The 'message' + // may consist of multiple lines, with line breaks indicated by CRLF pairs." and to ensure + // proper formatting across different SSH clients. 
+ message := func(msg string) string { + return strings.ReplaceAll(msg, "\n", "\r\n") + } + + if _, err := target.NewTarget(ctx.User()); err != nil { + logger.WithError(err).Error("invalid SSHID") + + return message(InvalidSSHIDMessage) + } + + sess, err := session.NewSession(ctx, dialer, cache) + if err != nil { + logger.WithError(err).Error("failed to create the session") + + return message(ConnectionFailedMessage) + } + + if err := sess.Dial(ctx); err != nil { + logger.WithError(err).Error("destination device is offline or cannot be reached") + + return message(ConnectionFailedMessage) + } + + if err := sess.Evaluate(ctx); err != nil { + logger.WithError(err).Error("destination device is blocked by a firewall rule or has a billing issue") + + return message(AccessDeniedMessage) + } + + return "" }, - Handler: handler.SSHHandler(tunnel), - SubsystemHandlers: map[string]gliderssh.SubsystemHandler{ - handler.SFTPSubsystem: handler.SFTPSubsystemHandler(tunnel), + PasswordHandler: auth.PasswordHandler, + PublicKeyHandler: auth.PublicKeyHandler, + // Channels form the foundation of secure communication between clients and servers in SSH connections. A + // channel, in the context of SSH, is a logical conduit through which data travels securely between the client + // and the server. SSH channels serve as the infrastructure for executing commands, establishing shell sessions, + // and securely forwarding network services. 
+ ChannelHandlers: map[string]gliderssh.ChannelHandler{ + channels.SessionChannel: channels.DefaultSessionHandler(), + channels.DirectTCPIPChannel: channels.DefaultDirectTCPIPHandler, }, - LocalPortForwardingCallback: func(ctx gliderssh.Context, dhost string, dport uint32) bool { + LocalPortForwardingCallback: func(_ gliderssh.Context, _ string, _ uint32) bool { return true }, - ReversePortForwardingCallback: func(ctx gliderssh.Context, bindHost string, bindPort uint32) bool { + ReversePortForwardingCallback: func(_ gliderssh.Context, _ string, _ uint32) bool { return false }, - ChannelHandlers: map[string]gliderssh.ChannelHandler{ - "session": gliderssh.DefaultSessionHandler, - channels.DirectTCPIPChannel: channels.TunnelDefaultDirectTCPIPHandler(tunnel), - }, } if _, err := os.Stat(os.Getenv("PRIVATE_KEY")); os.IsNotExist(err) { diff --git a/ssh/session/auther.go b/ssh/session/auther.go new file mode 100644 index 00000000000..0bf9b3b5c7d --- /dev/null +++ b/ssh/session/auther.go @@ -0,0 +1,143 @@ +package session + +import ( + "context" + "crypto/x509" + "encoding/pem" + + "github.com/Masterminds/semver" + gliderssh "github.com/gliderlabs/ssh" + "github.com/shellhub-io/shellhub/ssh/pkg/magickey" + gossh "golang.org/x/crypto/ssh" +) + +type authFunc func(*Session, *gossh.ClientConfig) error + +type authMethod int8 + +const ( + AuthMethodPublicKey authMethod = iota // AuthMethodPublicKey represents a public key authentication + AuthMethodPassword // AuthMethodPassword represents a password authentication +) + +// Auth interface defines a common interface for authenticating a session. An 'Auth' +// must have an associated [authMethod], an [authFunc] to authenticate the session, and +// an 'Evaluate' method to evaluate the session's context if necessary (e.g. the agent +// version when authenticating with public keys). +type Auth interface { + // Method returns the associated authentication method. 
+ Method() authMethod + + // Auth defines the callback that must be called when authenticating the session. + Auth() authFunc + + // Evaluate evaluates the session's context, returning an error if there's something + // possibly broken. It's not always necessary. + Evaluate(*Session) error +} + +type publicKeyAuth struct { + pk gliderssh.PublicKey +} + +func AuthPublicKey(pk gliderssh.PublicKey) Auth { + return &publicKeyAuth{pk: pk} +} + +func (*publicKeyAuth) Method() authMethod { + return AuthMethodPublicKey +} + +func (*publicKeyAuth) Auth() authFunc { + return func(session *Session, config *gossh.ClientConfig) error { + privateKey, err := session.api.CreatePrivateKey(context.TODO()) + if err != nil { + return err + } + + block, _ := pem.Decode(privateKey.Data) + + parsed, err := x509.ParsePKCS1PrivateKey(block.Bytes) + if err != nil { + return err + } + + signer, err := gossh.NewSignerFromKey(parsed) + if err != nil { + return err + } + + config.Auth = []gossh.AuthMethod{ + gossh.PublicKeys(signer), + } + + return nil + } +} + +func (p *publicKeyAuth) Evaluate(session *Session) error { + // Versions earlier than 0.6.0 do not validate the user when receiving a public key + // authentication request. This implies that requests with invalid users are + // treated as "authenticated" because the connection does not raise any error. + // Moreover, the agent panics after the connection ends. 
To avoid this, connections + // with public key are not permitted when agent version is 0.5.x or earlier + if !sshconf.AllowPublickeyAccessBelow060 { + version := session.Device.Info.Version + if version != "latest" { + semverVersion, err := semver.NewVersion(version) + if err != nil { + return ErrInvalidVersion + } + + if semverVersion.LessThan(semver.MustParse("0.6.0")) { + return ErrUnsuportedPublicKeyAuth + } + } + } + + fingerprint := gossh.FingerprintLegacyMD5(p.pk) + + magic, err := gossh.NewPublicKey(&magickey.GetReference().PublicKey) + if err != nil { + return err + } + + if gossh.FingerprintLegacyMD5(magic) != fingerprint { + if _, err = session.api.GetPublicKey(context.TODO(), fingerprint, session.Device.TenantID); err != nil { + return err + } + + if ok, err := session.api.EvaluateKey(context.TODO(), fingerprint, session.Device, session.Data.Target.Username); !ok || err != nil { + return ErrEvaluatePublicKey + } + } + + return err +} + +type passwordAuth struct { + pwd string +} + +func AuthPassword(pwd string) Auth { + return &passwordAuth{pwd: pwd} +} + +func (*passwordAuth) Method() authMethod { + return AuthMethodPassword +} + +func (p *passwordAuth) Auth() authFunc { + return func(_ *Session, config *gossh.ClientConfig) error { + config.Auth = []gossh.AuthMethod{ + gossh.Password(p.pwd), + } + + return nil + } +} + +func (*passwordAuth) Evaluate(*Session) error { + // We don't need (yet) to do any evaluation when authenticating with password. + return nil +} diff --git a/ssh/session/conf.go b/ssh/session/conf.go new file mode 100644 index 00000000000..5b3f4b0d1a2 --- /dev/null +++ b/ssh/session/conf.go @@ -0,0 +1,26 @@ +package session + +import ( + "github.com/shellhub-io/shellhub/pkg/envs" + log "github.com/sirupsen/logrus" +) + +type config struct { + // Allows SSH to connect with an agent via a public key when the agent version is less than 0.6.0. + // Agents 0.5.x or earlier do not validate the public key request and may panic. 
+ // Please refer to: https://github.com/shellhub-io/shellhub/issues/3453 + AllowPublickeyAccessBelow060 bool `env:"ALLOW_PUBLIC_KEY_ACCESS_BELLOW_0_6_0,default=false"` +} + +// sshconf is a global variable responsible for managing all environment configurations. +var sshconf *config + +func init() { + var err error + + sshconf, err = envs.Parse[config]() + if err != nil { + log.WithError(err). + Error("failed to parse the environment variables") + } +} diff --git a/ssh/session/errors.go b/ssh/session/errors.go index 311d7a409c8..a38e84c70a6 100644 --- a/ssh/session/errors.go +++ b/ssh/session/errors.go @@ -4,11 +4,16 @@ import "fmt" // Errors returned by the NewSession to the client. var ( - ErrBillingBlock = fmt.Errorf("Connection to this device is not available as your current namespace doesn't qualify for the free plan. To gain access, you'll need to contact the namespace owner to initiate an upgrade.\n\nFor a detailed estimate of costs based on your use-cases with ShellHub Cloud, visit our pricing page at https://www.shellhub.io/pricing. If you wish to upgrade immediately, navigate to https://cloud.shellhub.io/settings/billing. Your cooperation is appreciated.") //nolint:all - ErrFirewallBlock = fmt.Errorf("you cannot connect to this device because a firewall rule block your connection") - ErrFirewallConnection = fmt.Errorf("failed to communicate to the firewall") - ErrFirewallUnknown = fmt.Errorf("failed to evaluate the firewall rule") - ErrHost = fmt.Errorf("failed to get the device address") - ErrFindDevice = fmt.Errorf("failed to find the device") - ErrDial = fmt.Errorf("failed to connect to device agent, please check the device connection") + ErrBillingBlock = fmt.Errorf("Connection to this device is not available as your current namespace doesn't qualify for the free plan. 
To gain access, you'll need to contact the namespace owner to initiate an upgrade.\n\nFor a detailed estimate of costs based on your use-cases with ShellHub Cloud, visit our pricing page at https://www.shellhub.io/pricing. If you wish to upgrade immediately, navigate to https://cloud.shellhub.io/settings/billing. Your cooperation is appreciated.") //nolint:all + ErrFirewallBlock = fmt.Errorf("you cannot connect to this device because a firewall rule block your connection") + ErrFirewallConnection = fmt.Errorf("failed to communicate to the firewall") + ErrFirewallUnknown = fmt.Errorf("failed to evaluate the firewall rule") + ErrHost = fmt.Errorf("failed to get the device address") + ErrFindDevice = fmt.Errorf("failed to find the device") + ErrDial = fmt.Errorf("failed to connect to device agent, please check the device connection") + ErrInvalidVersion = fmt.Errorf("failed to parse device version") + ErrUnsuportedPublicKeyAuth = fmt.Errorf("connections using public keys are not permitted when the agent version is 0.5.x or earlier") + ErrUnexpectedAuthMethod = fmt.Errorf("failed to authenticate the session due to a unexpected method") + ErrEvaluatePublicKey = fmt.Errorf("failed to evaluate the provided public key") + ErrSeatAlreadySet = fmt.Errorf("this seat was already set") ) diff --git a/ssh/session/session.go b/ssh/session/session.go index c399d340742..44bf65147a7 100644 --- a/ssh/session/session.go +++ b/ssh/session/session.go @@ -1,345 +1,804 @@ package session import ( - "crypto/x509" - "encoding/pem" + "context" "errors" "fmt" "net" "net/http" + "strings" + "sync" + "sync/atomic" "time" gliderssh "github.com/gliderlabs/ssh" - "github.com/go-resty/resty/v2" + "github.com/gorilla/websocket" "github.com/shellhub-io/shellhub/pkg/api/internalclient" + "github.com/shellhub-io/shellhub/pkg/api/requests" + "github.com/shellhub-io/shellhub/pkg/cache" "github.com/shellhub-io/shellhub/pkg/clock" "github.com/shellhub-io/shellhub/pkg/envs" - 
"github.com/shellhub-io/shellhub/pkg/httptunnel" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/shellhub-io/shellhub/ssh/pkg/dialer" "github.com/shellhub-io/shellhub/ssh/pkg/host" - "github.com/shellhub-io/shellhub/ssh/pkg/metadata" + "github.com/shellhub-io/shellhub/ssh/pkg/target" log "github.com/sirupsen/logrus" gossh "golang.org/x/crypto/ssh" ) -type Session struct { - Client gliderssh.Session - // Username is the user that is trying to connect to the device; user on device. - Username string `json:"username"` - Device string `json:"device_uid"` // nolint: tagliatelle - // UID is the device's UID. - UID string `json:"uid"` - IPAddress string `json:"ip_address"` // nolint: tagliatelle - Type string `json:"type"` - Term string `json:"term"` - Authenticated bool `json:"authenticated"` - Lookup map[string]string - Pty bool - Dialed net.Conn -} - -// checkFirewall evaluates if there are firewall rules that block the connection. -func (s *Session) checkFirewall(ctx gliderssh.Context) (bool, error) { - api := metadata.RestoreAPI(ctx) - lookup := metadata.RestoreLookup(ctx) - - if envs.IsCloud() || envs.IsEnterprise() { - if err := api.FirewallEvaluate(lookup); err != nil { - switch { - case errors.Is(err, internalclient.ErrFirewallConnection): - return false, errors.Join(ErrFirewallConnection, err) - case errors.Is(err, internalclient.ErrFirewallBlock): - return false, errors.Join(ErrFirewallBlock, err) - default: - return false, errors.Join(ErrFirewallUnknown, err) - } - } +type Data struct { + Target *target.Target + // SSHID is the combination of device's name and namespace name. + SSHID string + // Device is the device connected. + Device *models.Device + // Namespace is the namespace where device is located. + Namespace *models.Namespace + IPAddress string + // Type is the connection type. + Type string + // Term is the terminal used for the client. 
+ Term string + // Handled check if the session is already handling a "shell", "exec" or a "subsystem". + Handled bool +} + +// AgentChannel represents a channel open between agent and server. +type AgentChannel struct { + // Channel is an open channel for communication between the agent and the server. + Channel gossh.Channel + // Requests is the channel to handle SSH requests. + Requests <-chan *gossh.Request +} + +// Close closes the underlying agent channel connection. +func (a *AgentChannel) Close() error { + return a.Channel.Close() +} + +// Agent represents a connection to an agent. +type Agent struct { + // Conn is the connection between the Server and Agent. + Conn net.Conn + // Client is a [gossh.Client] connected and authenticated to the agent, waiting for an open session request. + Client *gossh.Client + // Requests is the channel to handle SSH global requests. + Requests <-chan *gossh.Request + // Channels store the channels to agent, and its seat. + Channels map[int]*AgentChannel +} + +// Close closes the underlying ssh client connection. +func (a *Agent) Close() error { + for _, channel := range a.Channels { + channel.Close() //nolint:errcheck } - return true, nil + return a.Client.Close() } -// checkBilling evaluates if the device's namespace has pending payment questions. -func (s *Session) checkBilling(ctx gliderssh.Context, device string) (bool, error) { - api := metadata.RestoreAPI(ctx) +// ClientChannel represents a channel open between client and server. +type ClientChannel struct { + // Channel is an open channel for communication between the client and the server. + Channel gossh.Channel + // Requests is the channel to handle SSH requests. + Requests <-chan *gossh.Request +} - if envs.IsCloud() && envs.HasBilling() { - device, err := api.GetDevice(device) - if err != nil { - return false, errors.Join(ErrFindDevice, err) - } +// Close closes the underlying client channel connection. 
+func (c *ClientChannel) Close() error { + return c.Channel.Close() +} + +// Client represents a connection to a client. +type Client struct { + // Channels store the channels to client, and its seat. + Channels map[int]*ClientChannel +} - if evaluatation, status, _ := api.BillingEvaluate(device.TenantID); status != 402 && !evaluatation.CanConnect { - return false, errors.Join(ErrBillingBlock, err) +// Close closes a connection to client and all its channels. +func (c *Client) Close() error { + for _, channel := range c.Channels { + if err := channel.Close(); err != nil { + return err } } - return true, nil + return nil } -// dial dials the a connection between SSH server and the device agent. -func (s *Session) dial(ctx gliderssh.Context, tunnel *httptunnel.Tunnel, device string, session string) (net.Conn, error) { - dialed, err := tunnel.Dial(ctx, device) - if err != nil { - return nil, errors.Join(ErrDial, err) +type Events struct { + mu sync.Mutex + conn *websocket.Conn +} + +func (e *Events) WriteJSON(v any) error { + e.mu.Lock() + defer e.mu.Unlock() + + return e.conn.WriteJSON(v) +} + +func (e *Events) Close() error { + e.mu.Lock() + defer e.mu.Unlock() + + err := e.conn.Close() + e.conn = nil + + return err +} + +func (e *Events) Closed() bool { + e.mu.Lock() + defer e.mu.Unlock() + + return e.conn == nil +} + +// TODO: implement [io.Read] and [io.Write] on session to simplify the data piping. +type Session struct { + // UID is the session's UID. + UID string + + // Agent represents a connection to an Agent. + Agent *Agent + // Client represents a connection to a Client. + Client *Client + + api internalclient.Client + dialer *dialer.Dialer + // Events is a connection to the endpoint to save session's events. + Events *Events + + once *sync.Once + + // Seats represents passengers a session. + // + // A passenger is, in a multiplexed SSH session, the subsequent SSH sessions that connect to the same server using + // the already established master connection. 
+ Seats Seats + + Data +} + +// Seat represent a passenger in a session. +type Seat struct { + // HasPty is the status of pty on the seat. + HasPty bool +} + +type Seats struct { + mu *sync.Mutex + // counter count atomically seats of a session. + counter *atomic.Int32 + // Items represents the individual seat of a session. + Items *sync.Map +} + +// NewSeats creates a new [Seats] defining initial values for internal properties. +func NewSeats() Seats { + return Seats{ + mu: new(sync.Mutex), + counter: new(atomic.Int32), + Items: new(sync.Map), } +} - req, _ := http.NewRequest(http.MethodGet, fmt.Sprintf("/ssh/%s", session), nil) - if err = req.Write(dialed); err != nil { - return nil, err +// NewSeat creates a new seat inside seats. +func (s *Seats) NewSeat() (int, error) { + s.mu.Lock() + defer s.mu.Unlock() + + id := int(s.counter.Load()) + defer s.counter.Add(1) + + s.Items.Store(id, &Seat{ + HasPty: false, + }) + + return id, nil +} + +// Get gets a seat reference from their id. +func (s *Seats) Get(seat int) (*Seat, bool) { + loaded, ok := s.Items.Load(seat) + if !ok { + return nil, false + } + + item, ok := loaded.(*Seat) + if !ok { + return nil, false + } + + return item, true +} + +// SetPty sets a pty status to a seat from their id. +func (s *Seats) SetPty(seat int, status bool) { + s.mu.Lock() + defer s.mu.Unlock() + + item, ok := s.Get(seat) + if !ok { + log.Warn("failed to set pty because no seat was created before") + + return } - return dialed, nil + item.HasPty = status + + s.Items.Store(seat, item) } -// NewSession creates a new Client from a client to agent, validating data, instance and payment. -func NewSession(client gliderssh.Session, tunnel *httptunnel.Tunnel) (*Session, error) { - ctx := client.Context() +// NewSession creates a new Session but differs from [New] as it only creates +// the session without registering, connecting to the agent, etc. +// +// It's designed to be used within New. 
+func NewSession(ctx gliderssh.Context, dialer *dialer.Dialer, cache cache.Cache) (*Session, error) { + snap := getSnapshot(ctx) - hos, err := host.NewHost(client.RemoteAddr().String()) + api, err := internalclient.NewClient(nil) if err != nil { + return nil, err + } + + sshid := ctx.User() + + hos, err := host.NewHost(ctx.RemoteAddr().String()) + if err != nil { + log.WithError(err). + Error("failed to create a new host") + return nil, ErrHost } - if hos.IsLocalhost() { - env := loadEnv(client.Environ()) - if value, ok := env["IP_ADDRESS"]; ok { - hos.Host = value - } + target, err := target.NewTarget(sshid) + if err != nil { + return nil, err } - uid := ctx.Value(gliderssh.ContextKeySessionID).(string) //nolint:forcetypeassert + var namespaceName, deviceName string + if target.IsSSHID() { + namespaceName, deviceName, err = target.SplitSSHID() + if err != nil { + return nil, err + } + } else { + if hos.IsLocalhost() { + var data string - device := metadata.RestoreDevice(ctx) - target := metadata.RestoreTarget(ctx) - lookup := metadata.RestoreLookup(ctx) + if err := cache.Get(ctx, "web-ip/"+sshid, &data); err != nil { + log.WithError(err). + Error("failed to get the ip from web session") - lookup["username"] = target.Username - lookup["ip_address"] = hos.Host + return nil, err + } - session := new(Session) - if ok, err := session.checkFirewall(ctx); err != nil || !ok { - log.WithError(err). - WithFields(log.Fields{"session": uid, "sshid": client.User()}). - Error("Error when trying to evaluate firewall rules") + if err := cache.Delete(ctx, "web-ip/"+sshid); err != nil { + log.WithError(err). 
+ Error("failed to delete the web session ip from cache") - return nil, err + return nil, err + } + + parts := strings.Split(data, ":") + target.Data = parts[0] + hos.Host = parts[1] + } + + device, err := api.GetDevice(ctx, target.Data) + if err != nil { + return nil, err + } + + namespaceName = device.Namespace + deviceName = device.Name } - if ok, err := session.checkBilling(ctx, device.UID); err != nil || !ok { - log.WithError(err). - WithFields(log.Fields{"session": uid, "sshid": client.User()}). - Error("Error when trying to evaluate billing") + lookupDevice, err := api.DeviceLookup(ctx, namespaceName, deviceName) + if err != nil { + return nil, err + } + namespace, err := api.NamespaceLookup(ctx, lookupDevice.TenantID) + if err != nil { return nil, err } - dialed, err := session.dial(ctx, tunnel, device.UID, uid) + events, err := api.EventSessionStream(ctx, ctx.SessionID()) if err != nil { - log.WithError(err). - WithFields(log.Fields{"session": uid, "sshid": client.User()}). - Error("Error when trying to dial") + log.WithError(err).Error("failed to connecting to endpoint to save session's events") - return nil, ErrDial + return nil, err } - session.Client = client - session.UID = uid - session.Username = target.Username - session.IPAddress = hos.Host - session.Device = device.UID - session.Lookup = lookup - session.Dialed = dialed - - session.setPty() - session.setType() + session := &Session{ + UID: ctx.SessionID(), + api: api, + dialer: dialer, + Events: &Events{ + mu: sync.Mutex{}, + conn: events, + }, + Data: Data{ + IPAddress: hos.Host, + Target: target, + Device: lookupDevice, + Namespace: namespace, + SSHID: fmt.Sprintf("%s@%s.%s", target.Username, namespaceName, deviceName), + }, + once: new(sync.Once), + Seats: NewSeats(), + Agent: &Agent{ + Channels: make(map[int]*AgentChannel), + }, + Client: &Client{ + Channels: make(map[int]*ClientChannel), + }, + } - session.Register(client) // nolint:errcheck + snap.save(session, StateCreated) return session, 
nil } -// NewSessionWithoutClient creates a new session to connect the agent, validating data, instance and payment. -// -// This function is used to create a new session when the client is not available, what is true when the SSH client -// indicate that the request type is `none` or in the case of a port forwarding -func NewSessionWithoutClient(ctx gliderssh.Context, tunnel *httptunnel.Tunnel) (*Session, error) { - uid := ctx.Value(gliderssh.ContextKeySessionID).(string) //nolint:forcetypeassert +// NewClientChannel accepts a new channel from a client and set a seat for it. +func (s *Session) NewClientChannel(newChannel gossh.NewChannel, seat int) (*ClientChannel, error) { + if _, ok := s.Client.Channels[seat]; ok { + return nil, ErrSeatAlreadySet + } - hos, err := host.NewHost(ctx.RemoteAddr().String()) + channel, requests, err := newChannel.Accept() if err != nil { - return nil, ErrHost + return nil, err } - device := metadata.RestoreDevice(ctx) - target := metadata.RestoreTarget(ctx) - lookup := metadata.RestoreLookup(ctx) + c := &ClientChannel{ + Channel: channel, + Requests: requests, + } - lookup["username"] = target.Username - lookup["ip_address"] = hos.Host + s.Client.Channels[seat] = c - session := new(Session) - if ok, err := session.checkFirewall(ctx); err != nil || !ok { - log.WithError(err). - WithFields(log.Fields{"session": uid, "sshid": target.Username}). - Error("Error when trying to evaluate firewall rules") + return c, nil +} +// NewAgentChannel opens a new channel to agent and set a seat for it. +func (s *Session) NewAgentChannel(name string, seat int) (*AgentChannel, error) { + if _, ok := s.Agent.Channels[seat]; ok { + return nil, ErrSeatAlreadySet + } + + channel, requests, err := s.Agent.Client.OpenChannel(name, nil) + if err != nil { return nil, err } - if ok, err := session.checkBilling(ctx, device.UID); err != nil || !ok { - log.WithError(err). - WithFields(log.Fields{"session": uid, "sshid": target.Username}). 
- Error("Error when trying to evaluate billing") + a := &AgentChannel{ + Channel: channel, + Requests: requests, + } - return nil, err + s.Agent.Channels[seat] = a + + return a, nil +} + +func (s *Session) checkFirewall(ctx context.Context) (bool, error) { + // TODO: Refactor firewall evaluation to remove the map requirement. + if err := s.api.FirewallEvaluate(ctx, map[string]string{ + "domain": s.Namespace.Name, + "name": s.Device.Name, + "username": s.Target.Username, + "ip_address": s.IPAddress, + }); err != nil { + defer log.WithError(err).WithFields(log.Fields{ + "uid": s.UID, + "sshid": s.SSHID, + }).Info("an error or a firewall rule block this connection") + + var e *internalclient.Error + if !errors.As(err, &e) { + return false, ErrFirewallUnknown + } + + switch e.Code { + case http.StatusForbidden: + return false, ErrFirewallBlock + case http.StatusServiceUnavailable: + return false, ErrFirewallConnection + default: + return false, ErrFirewallUnknown + } } - dialed, err := session.dial(ctx, tunnel, device.UID, uid) + return true, nil +} + +func (s *Session) checkBilling(ctx context.Context) (bool, error) { + device, err := s.api.GetDevice(ctx, s.Device.UID) + if err != nil { + defer log.WithError(err).WithFields(log.Fields{ + "uid": s.UID, + "sshid": s.SSHID, + }).Info("failed to get the device on billing evaluation") + + return false, ErrFindDevice + } + + evaluation, err := s.api.BillingEvaluate(context.TODO(), device.TenantID) + if err != nil { + var billingErr *internalclient.Error + if errors.As(err, &billingErr) && billingErr.Code == 402 { + return false, ErrBillingBlock + } + + return false, err + } + + if !evaluation.CanConnect { + return false, ErrBillingBlock + } + + return true, nil +} + +// registerAPISession registers a new session on the API. 
+func (s *Session) register(ctx context.Context) error { + err := s.api.SessionCreate(ctx, requests.SessionCreate{ + UID: s.UID, + DeviceUID: s.Device.UID, + Username: s.Target.Username, + IPAddress: s.IPAddress, + Type: "none", + Term: "none", + }) if err != nil { log.WithError(err). - WithFields(log.Fields{"session": uid, "sshid": target.Username}). - Error("Error when trying to dial") + WithFields(log.Fields{"session": s.UID, "sshid": s.SSHID}). + Error("Error when trying to register the client on API") - return nil, ErrDial + return err } - session.Client = nil - session.UID = uid - session.Username = target.Username - session.IPAddress = hos.Host - session.Device = device.UID - session.Lookup = lookup - session.Dialed = dialed + return nil +} + +// Authenticate marks the session as authenticated on the API. +// +// It returns an error if authentication fails. +func (s *Session) authenticate(ctx context.Context) error { + value := true - return session, nil + return s.api.UpdateSession(ctx, s.UID, &models.SessionUpdate{ + Authenticated: &value, + }) } -func (s *Session) GetType() string { - return s.Type +func (s *Session) Recorded(seat int) error { + value := true + + if !s.Namespace.Settings.SessionRecord { + return errors.New("record is disable for this namespace") + } + + if seat, ok := s.Seats.Get(seat); !ok || !seat.HasPty { + return errors.New("session won't be recorded because there is no pty") + } + + return s.api.UpdateSession(context.TODO(), s.UID, &models.SessionUpdate{ + Recorded: &value, + }) } -// NewClientConnWithDeadline creates a new connection to the agent. -func (s *Session) NewClientConnWithDeadline(config *gossh.ClientConfig) (*gossh.Client, <-chan *gossh.Request, error) { +// connect connects the session's client to the session's agent. 
+func (s *Session) connect(ctx gliderssh.Context, authOpt authFunc) error { + config := &gossh.ClientConfig{ + User: s.Target.Username, + HostKeyCallback: gossh.InsecureIgnoreHostKey(), // nolint: gosec + } + + if err := authOpt(s, config); err != nil { + return errors.New("fail to generate the authentication information") + } + const Addr = "tcp" + // NOTICE: When the agent connection is closed, we should redial this connection before trying to authenticate. + if s.Agent.Conn == nil { + if err := s.Dial(ctx); err != nil { + return err + } + } + if config.Timeout > 0 { - if err := s.Dialed.SetReadDeadline(clock.Now().Add(config.Timeout)); err != nil { + if err := s.Agent.Conn.SetReadDeadline(clock.Now().Add(config.Timeout)); err != nil { log.WithError(err). - WithFields(log.Fields{"session": s.UID, "sshid": s.Client.User()}). + WithFields(log.Fields{"session": s.UID, "sshid": s.SSHID}). Error("Error when trying to set dial deadline") - return nil, nil, err + return err } } - cli, chans, reqs, err := gossh.NewClientConn(s.Dialed, Addr, config) + conn, chans, reqs, err := gossh.NewClientConn(s.Agent.Conn, Addr, config) if err != nil { log.WithError(err). WithFields(log.Fields{"session": s.UID}). Error("Error when trying to create the client's connection") - return nil, nil, err + // NOTICE: To help to identify when the Agent's connection is closed, we set it to nil when an + // authentication error happens. + s.Agent.Conn = nil + + return err } if config.Timeout > 0 { - if err := s.Dialed.SetReadDeadline(time.Time{}); err != nil { + if err := s.Agent.Conn.SetReadDeadline(time.Time{}); err != nil { log.WithError(err). - WithFields(log.Fields{"session": s.UID, "sshid": s.Client.User()}). + WithFields(log.Fields{"session": s.UID, "sshid": s.SSHID}). 
Error("Error when trying to set dial deadline with Time{}") - return nil, nil, err + return err } } ch := make(chan *gossh.Request) close(ch) - return gossh.NewClient(cli, chans, ch), reqs, nil + s.Agent.Client = gossh.NewClient(conn, chans, ch) + s.Agent.Requests = reqs + + return nil } -// Register registers a new Client at the api. -func (s *Session) Register(_ gliderssh.Session) error { - _, err := resty.New().R().SetBody(*s).Post("http://api:8080/internal/sessions") +var ErrDialUnknown = errors.New("unknown protocol version") + +// Dial establishes the underlying transport to the target device. For V1 +// transports an HTTP GET request is issued (legacy reverse tunnel). For +// V2 transports a multistream protocol selection is performed using the +// ProtoSSHOpen identifier followed by a JSON envelope with the session +// id. After this method returns s.Agent.Conn is a raw channel ready for +// SSH key exchange and channel opens. +func (s *Session) Dial(ctx gliderssh.Context) error { + var err error + + ctx.Lock() + defer ctx.Unlock() + + conn, err := s.dialer.DialTo(ctx, s.Device.TenantID, s.Device.UID, dialer.SSHOpenTarget{SessionID: s.UID}) if err != nil { - log.WithError(err). - WithFields(log.Fields{"session": s.UID, "sshid": s.Client.User()}). - Error("Error when trying to register the client on API") + log.WithFields(log.Fields{"session": s.UID, "sshid": s.SSHID}).WithError(err).Error("failed to open ssh session") - return err + return errors.Join(ErrDial, err) } + s.Agent.Conn = conn + return nil } -func (s *Session) Finish() error { - if s.Dialed != nil { - request, _ := http.NewRequest(http.MethodDelete, fmt.Sprintf("/ssh/close/%s", s.UID), nil) +func (s *Session) Evaluate(ctx gliderssh.Context) error { + snap := getSnapshot(ctx) - if err := request.Write(s.Dialed); err != nil { - log.WithError(err). - WithFields(log.Fields{"session": s.UID, "sshid": s.Client.User()}). 
- Warning("Error when trying write the request to /ssh/close") + if envs.IsEnterprise() { + if ok, err := s.checkFirewall(ctx); err != nil || !ok { + return err + } + + if envs.IsCloud() { + if ok, err := s.checkBilling(ctx); err != nil || !ok { + return err + } + } + } + + snap.save(s, StateEvaluated) + + return nil +} + +// Auth authenticate a [Session] based on the provided context. +// +// As a client may try to create N sessions with the same context, a [snapshot] is used +// to save/retrieve the current session state. To illustrate a practical use of this +// pattern you can imagine a client that wants to connect to a specified device. It first +// calls the `PublicKeyHandler` with a specified context. At this stage, there are no +// sessions associated with the provided context, and a new one will be created. If it +// fails, the same client (and consequently the same context) will call the +// `PasswordHandler`, which also calls `session.New`. Since we have already created a +// session in the previous authentication attempt, instead of repeating all operations, +// we can safely retrieve the same session again but attempt authentication with a +// password this time. +// +// Next steps can use the context's snapshot to retrieve the created session. An error is +// returned if any occurs. +func (s *Session) Auth(ctx gliderssh.Context, auth Auth) error { + snap := getSnapshot(ctx) + + // The following code is structured to be read from top to bottom, disregarding the + // switch and case statements. These statements serve as a "cache" for handling + // different states efficiently. 
+ sess, state := snap.retrieve() + switch state { + case StateEvaluated: + if err := auth.Evaluate(sess); err != nil { + return err } + + if err := sess.register(ctx); err != nil { + return err + } + + snap.save(sess, StateRegistered) + + fallthrough + case StateRegistered: + if err := sess.connect(ctx, auth.Auth()); err != nil { + return err + } + + if err := sess.authenticate(ctx); err != nil { + return err + } + default: + // The default arm is intended to avoid [StateNil] and [StateCreated], what are used before the authentication. + return errors.New("invalid session state") } - if errs := internalclient.NewClient().FinishSession(s.UID); len(errs) > 0 { - log.WithError(errs[0]). - WithFields(log.Fields{"session": s.UID, "sshid": s.Client.User()}). - Error("Error when trying to finish the session") + snap.save(sess, StateFinished) - return errs[0] + return nil +} + +func (s *Session) NewSeat() (int, error) { + return s.Seats.NewSeat() +} + +// Events register an event to the session. +func (s *Session) Event(t string, data any, seat int) { + if s.Events.Closed() { + log.Debug("failed to save because events connection was closed") + + return + } + + s.Events.WriteJSON(&models.SessionEvent{ //nolint:errcheck + Session: s.UID, + Type: models.SessionEventType(t), + Timestamp: clock.Now(), + Data: data, + Seat: seat, + }) +} + +func Event[D any](sess *Session, t string, data []byte, seat int) { + if sess.Events.Closed() { + log.Debug("failed to save because events connection was closed") + + return + } + + d := new(D) + if err := gossh.Unmarshal(data, d); err != nil { + return + } + + sess.Events.WriteJSON(&models.SessionEvent{ //nolint:errcheck + Session: sess.UID, + Type: models.SessionEventType(t), + Timestamp: clock.Now(), + Data: data, + Seat: seat, + }) +} + +func (s *Session) KeepAlive(ctx context.Context) error { + if err := s.api.KeepAliveSession(ctx, s.UID); err != nil { + log.WithError(err). + WithFields(log.Fields{"session": s.UID, "sshid": s.SSHID}). 
+ Error("Error when trying to keep alive the session") + + return err } return nil } -// NewClientConfiguration creates a [gossh.ClientConfig] with the default configuration required by ShellHub -// to connect to the device agent that are inside the [gliderssh.Context]. -func NewClientConfiguration(ctx gliderssh.Context) (*gossh.ClientConfig, error) { - target := metadata.RestoreTarget(ctx) - if target == nil { - return nil, errors.New("failed to get the target from context") +// Announce is a custom message provided by the end user that can be printed when a new connection within the namespace +// is established. +// +// Returns the announcement or an error, if any. If no announcement is set, it returns an empty string. +func (s *Session) Announce(client gossh.Channel) error { + if _, err := client.Write([]byte( + "Connected to " + s.SSHID + " via ShellHub.\n\r", + )); err != nil { + return err } - config := &gossh.ClientConfig{ - User: target.Username, - HostKeyCallback: gossh.InsecureIgnoreHostKey(), // nolint: gosec + announcement := s.Namespace.Settings.ConnectionAnnouncement + + if announcement == "" { + return nil } - api := metadata.RestoreAPI(ctx) - if api == nil { - return nil, errors.New("failed to get the API from context") + // NOTE: Remove whitespace and new lines at end. + announcement = strings.TrimRightFunc(announcement, func(r rune) bool { + return r == ' ' || r == '\n' || r == '\t' + }) + + if _, err := client.Write([]byte(strings.ReplaceAll(announcement, "\n", "\n\r") + "\n\r")); err != nil { + return err } - switch metadata.RestoreAuthenticationMethod(ctx) { - case metadata.PublicKeyAuthenticationMethod: - privateKey, err := api.CreatePrivateKey() - if err != nil { - return nil, err - } + return nil +} - block, _ := pem.Decode(privateKey.Data) +// Finish terminates the session between Agent and Client, sending a request to Agent to closes it. 
+func (s *Session) Finish() (err error) { + s.once.Do(func() { + log.WithFields(log.Fields{ + "uid": s.UID, + }).Trace("session finish called") - parsed, err := x509.ParsePKCS1PrivateKey(block.Bytes) - if err != nil { - return nil, err - } + defer s.Events.Close() - signer, err := gossh.NewSignerFromKey(parsed) - if err != nil { - return nil, err + if s.Agent.Conn != nil { + request, _ := http.NewRequest(http.MethodDelete, fmt.Sprintf("/ssh/close/%s", s.UID), nil) + + if err = request.Write(s.Agent.Conn); err != nil { + log.WithError(err). + WithFields(log.Fields{"session": s.UID, "sshid": s.SSHID}). + Warning("Error when trying write the request to /ssh/close") + } } - config.Auth = []gossh.AuthMethod{ - gossh.PublicKeys(signer), + if err := s.api.FinishSession(context.TODO(), s.UID); err != nil { + log.WithError(err). + WithFields(log.Fields{"session": s.UID, "sshid": s.SSHID}). + Error("Error when trying to finish the session") } - case metadata.PasswordAuthenticationMethod: - password := metadata.RestorePassword(ctx) - config.Auth = []gossh.AuthMethod{ - gossh.Password(password), + if envs.IsEnterprise() { + log.WithFields(log.Fields{ + "uid": s.UID, + }).Info("saving sessions as Asciinema files") + + s.Seats.Items.Range(func(key, value any) bool { + id := key.(int) + seat := value.(*Seat) + + if seat.HasPty { + if err := s.api.SaveSession(context.TODO(), s.UID, id); err != nil { + log.WithError(err).WithFields(log.Fields{ + "uid": s.UID, + "seat": seat, + }).Error("failed to save the session as Asciinema file") + + return true + } + + log.WithFields(log.Fields{ + "uid": s.UID, + "seat": seat, + }).Info("asciinema file saved") + } + + return true + }) } - } - return config, nil + log.WithFields( + log.Fields{ + "uid": s.UID, + "device": s.Device.UID, + "username": s.Target.Username, + "ip": s.IPAddress, + }).Info("session finished") + }) + + return nil } diff --git a/ssh/session/snapshot.go b/ssh/session/snapshot.go new file mode 100644 index 
00000000000..5f8167f5c9b --- /dev/null +++ b/ssh/session/snapshot.go @@ -0,0 +1,54 @@ +package session + +import gliderssh "github.com/gliderlabs/ssh" + +type snapshot struct { + session *Session + state State +} + +// State type defines the current State of an associated session. It is used as "savepoints" for snapshots. +type State int + +const ( + StateNil = iota + 1 // StateNil represents a non-initialized session. + StateCreated // StateCreated represents a session that has been created but not yet registered with the API. + StateDialed // StateDialed represents a session that has been connected to a device. + StateEvaluated // StateEvaluated represents an evaluated session. + StateRegistered // StateRegistered represents a session that has been registered with the API but not yet connected to an agent. + StateFinished // StateFinished represents a session that has been completed. +) + +// getSnapshot is responsible for managing the state of a session associated with +// the provided context. It creates a new snapshot if one is not already associated. +// A snapshot can be used to retrieve and modify the current state of a session, enabling +// composition between steps and avoiding redundant operations. +// +// Utilize [save] to store the state of a session and [retrieve] to retrieve it. +func getSnapshot(ctx gliderssh.Context) *snapshot { + if snap, ok := ctx.Value("snap").(*snapshot); ok && snap != nil { + return snap + } + + snap := &snapshot{session: nil, state: StateNil} + ctx.SetValue("snap", snap) + + return snap +} + +// save stores the provided session and its state. +func (s *snapshot) save(session *Session, state State) { + s.session = session + s.state = state +} + +// retrieve retrieves the current state and the associated session. +func (s *snapshot) retrieve() (*Session, State) { + return s.session, s.state +} + +// ObtainSession obtains a session and its state from the provided context.
If there's +// no session associated, it creates a new one with state [StateNil]. +func ObtainSession(ctx gliderssh.Context) (*Session, State) { + return getSnapshot(ctx).retrieve() +} diff --git a/ssh/session/types.go b/ssh/session/types.go deleted file mode 100644 index 0e0b17167a7..00000000000 --- a/ssh/session/types.go +++ /dev/null @@ -1,67 +0,0 @@ -package session - -import ( - "strings" - - "github.com/shellhub-io/shellhub/ssh/pkg/metadata" -) - -const ( - Web = "web" // web terminal. - Term = "term" // interactive session - Exec = "exec" // command execution - HereDoc = "heredoc" // heredoc pty. - SCP = "scp" // scp. - SFTP = "sftp" // sftp subsystem. - Unk = "unknown" // unknown. -) - -// setPty sets the connection's pty. -func (s *Session) setPty() { - pty, _, isPty := s.Client.Pty() - if isPty { - s.Term = pty.Term - } - - s.Pty = isPty -} - -// setType sets the connection`s type to session. -// -// Connection types possible are: Web, SFTP, SCP, Exec, HereDoc, Term, Unk (unknown) -func (s *Session) setType() { - ctx := s.Client.Context() - - env := loadEnv(s.Client.Environ()) - if value, ok := env["WS"]; ok && value == "true" { - env["WS"] = "false" - s.Type = Web - - return - } - - if s.Client.Subsystem() == SFTP { - s.Type = SFTP - - return - } - - var cmd string - commands := s.Client.Command() - if len(commands) != 0 { - cmd = commands[0] - } - - switch { - case !s.Pty && strings.HasPrefix(cmd, SCP): - s.Type = SCP - case !s.Pty && metadata.RestoreRequest(ctx) == "shell": - s.Type = HereDoc - case cmd != "": - s.Type = Exec - case s.Pty: - s.Type = Term - default: - s.Type = Unk - } -} diff --git a/ssh/session/utils.go b/ssh/session/utils.go deleted file mode 100644 index 61a6b0104f4..00000000000 --- a/ssh/session/utils.go +++ /dev/null @@ -1,59 +0,0 @@ -package session - -import ( - "context" - "strings" - - gliderssh "github.com/gliderlabs/ssh" - "github.com/shellhub-io/shellhub/pkg/api/internalclient" - log "github.com/sirupsen/logrus" - gossh 
"golang.org/x/crypto/ssh" -) - -// TODO: Evaluate if we can use a dedicated package for this. -func loadEnv(env []string) map[string]string { - m := make(map[string]string, cap(env)) - - for _, s := range env { - sp := strings.Split(s, "=") - if len(sp) == 2 { - k := sp[0] - v := sp[1] - m[k] = v - } - } - - return m -} - -func HandleRequests(ctx context.Context, reqs <-chan *gossh.Request, c internalclient.Client, done <-chan struct{}) { - for { - select { - case req := <-reqs: - if req == nil { - break - } - - switch req.Type { - case "keepalive": - if id, ok := ctx.Value(gliderssh.ContextKeySessionID).(string); ok { - if errs := c.KeepAliveSession(id); len(errs) > 0 { - log.Error(errs[0]) - } - } - - if err := req.Reply(false, nil); err != nil { - log.Error(err) - } - default: - if req.WantReply { - if err := req.Reply(false, nil); err != nil { - log.Error(err) - } - } - } - case <-done: - return - } - } -} diff --git a/ssh/web/conn.go b/ssh/web/conn.go new file mode 100644 index 00000000000..09483ad3399 --- /dev/null +++ b/ssh/web/conn.go @@ -0,0 +1,190 @@ +package web + +import ( + "encoding/json" + "errors" + "io" + "time" + "unicode/utf8" + + "github.com/shellhub-io/shellhub/pkg/clock" + log "github.com/sirupsen/logrus" + "golang.org/x/net/websocket" +) + +//go:generate mockery --name Socket --filename socket.go +type Socket interface { + io.ReadWriteCloser +} + +type Conn struct { + // Socket is the internal websocket connection the messages come from. + Socket Socket + // Pinger is reponsable to inform the server that a SSH session is open. + Pinger *time.Ticker +} + +func NewConn(socket Socket) *Conn { + return &Conn{ + Socket: socket, + Pinger: time.NewTicker(30 * time.Second), + } +} + +// CharacterSize is the size of a single character in bytes when encoded in UTF-8. +const CharacterSize = 4 + +// TermniosMaxLineLength is the maximum line length for a terminal input in characters. 
+// +// [termios] is a POSIX-defined API for configuring terminal I/O settings in Unix-like systems (Linux, macOS, *BSD, etc.). +// It provides fine-grained control over how terminals (TTYs and PTYs) handle input, output, and line discipline +// features like canonical mode, echo, signals, and baud rates. +// +// Essentially, [termios] settings control how the PTY slave, used by our web terminal, processes input and output data. +// It also affects how the slave buffers input, handles special chars (like Ctrl-C), line editing, etc. In canonical +// mode, the terminal processes input line-by-line, meaning it waits for a newline character before sending the data to +// the application. The maximum line length is 4096 characters, any input longer than that is truncated. +// +// [termios] documentation says: +// +// The maximum line length is 4096 chars (including the +// terminating newline character); lines longer than 4096 chars +// are truncated. After 4095 characters, input processing (e.g., +// ISIG and ECHO* processing) continues, but any input data after +// 4095 characters up to (but not including) any terminating +// newline is discarded. This ensures that the terminal can +// always receive more input until at least one line can be read. +// +// [termios]: https://www.man7.org/linux/man-pages/man3/termios.3.html +const TermniosMaxLineLength = 4096 + +// ReadMessageBufferSize is the size of the buffer used to read messages from the websocket connection. +// +// As we read JSON messages from the websocket connection, we need to ensure that the buffer size is large enough +// so, we have decided to use a buffer size of 16404 bytes, which is the [TermniosMaxLineLength] plus the size of the +// minimum message size [MessageMinSize]. 
+const ReadMessageBufferSize = MessageMinSize + (TermniosMaxLineLength * CharacterSize) + +func (c *Conn) ReadMessage(message *Message) (int, error) { + limit := io.LimitReader(c.Socket, ReadMessageBufferSize) + decoder := json.NewDecoder(limit) + + var data json.RawMessage + message.Data = &data + + if err := decoder.Decode(message); err != nil { + log.WithError(err).Error("failed to read a line from the websocket connection") + + return 0, errors.Join(ErrConnReadMessageJSONInvalid, err) + } + + switch message.Kind { + case messageKindInput: + var str string + + if err := json.Unmarshal(data, &str); err != nil { + return 0, errors.Join(ErrConnReadMessageJSONInvalid) + } + + // NOTE: Enforce the maximum line length for terminal input even when the buffer store more characters than + // [TermniosMaxLineLength], as the PTY slave will truncate the input to 4096 characters. + if utf8.RuneCountInString(str) > TermniosMaxLineLength { + return 0, errors.Join(ErrConnReadMessageInputTooLong) + } + + message.Data = str + case messageKindResize: + var dim Dimensions + + if err := json.Unmarshal(data, &dim); err != nil { + return 0, errors.Join(ErrConnReadMessageJSONInvalid) + } + + message.Data = dim + case messageKindSignature: + var sig string + + if err := json.Unmarshal(data, &sig); err != nil { + return 0, errors.Join(ErrConnReadMessageJSONInvalid) + } + + message.Data = sig + default: + return 0, errors.Join(ErrConnReadMessageKindInvalid) + } + + return int(decoder.InputOffset()), nil +} + +func (c *Conn) WriteMessage(message *Message) (int, error) { + buffer, err := json.Marshal(message) + if err != nil { + return 0, errors.Join(ErrConnReadMessageJSONInvalid) + } + + wrote, err := c.Socket.Write(buffer) + if err != nil { + return wrote, errors.Join(ErrConnReadMessageSocketWrite, err) + } + + return wrote, nil +} + +func (c *Conn) WriteBinary(data []byte) (int, error) { + socket, ok := c.Socket.(*websocket.Conn) + if !ok { + // NOTE: If the underlying connection is not 
a websocket connection, fallback to a normal write. + // This is useful for testing purposes, where we use a mock socket that does not implement + // the websocket interface. + return c.Socket.Write(data) + } + + frame, err := socket.NewFrameWriter(websocket.BinaryFrame) + if err != nil { + return 0, errors.Join(ErrConnWriteMessageFailedFrame, err) + } + + wrote, err := frame.Write(data) + if err != nil { + return wrote, errors.Join(ErrConnReadMessageSocketWrite, err) + } + + return wrote, nil +} + +func (c *Conn) Read(buffer []byte) (int, error) { + return c.Socket.Read(buffer) +} + +func (c *Conn) Write(buffer []byte) (int, error) { + return c.Socket.Write(buffer) +} + +func (c *Conn) Close() error { + c.Pinger.Stop() + + return c.Socket.Close() +} + +func (c *Conn) KeepAlive() { + socket, ok := c.Socket.(*websocket.Conn) + if !ok { + return + } + + for { + if err := socket.SetDeadline(clock.Now().Add((time.Second * 30) * 2)); err != nil { + return + } + + if fw, err := socket.NewFrameWriter(websocket.PingFrame); err != nil { + return + } else if _, err = fw.Write([]byte{}); err != nil { + return + } + + if _, running := <-c.Pinger.C; !running { + return + } + } +} diff --git a/ssh/web/conn_test.go b/ssh/web/conn_test.go new file mode 100644 index 00000000000..04ef75041c3 --- /dev/null +++ b/ssh/web/conn_test.go @@ -0,0 +1,167 @@ +package web + +import ( + "encoding/json" + "errors" + "testing" + + "github.com/shellhub-io/shellhub/ssh/web/mocks" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +func TestConnReadMessage_input(t *testing.T) { + socket := new(mocks.Socket) + conn := NewConn(socket) + + type Expected struct { + message *Message + read int + err error + } + + tests := []struct { + description string + requiredMocks func() + expect Expected + }{ + { + description: "fail when socket reading fail", + requiredMocks: func() { + socket.On("Read", mock.AnythingOfType("[]uint8")).Return(0, errors.New("")).Once() + }, + expect: 
Expected{ + message: &Message{ + Data: new(json.RawMessage), + }, + read: 0, + err: ErrConnReadMessageJSONInvalid, + }, + }, + { + description: "fail when data read is not a JSON object", + requiredMocks: func() { + socket.On("Read", mock.AnythingOfType("[]uint8")).Return(512, nil).Once() + }, + expect: Expected{ + message: &Message{Data: new(json.RawMessage)}, + read: 0, + err: ErrConnReadMessageJSONInvalid, + }, + }, + { + description: "success to read the message", + requiredMocks: func() { + socket.On("Read", mock.AnythingOfType("[]uint8")).Return(21, nil).Run(func(args mock.Arguments) { + b := args.Get(0).([]byte) + + buf, _ := json.Marshal(Message{ + Kind: messageKindInput, + Data: "a", + }) + + copy(b, buf) + }).Once() + }, + expect: Expected{ + message: &Message{ + Kind: messageKindInput, + Data: "a", + }, + read: 21, + err: nil, + }, + }, + } + + for _, test := range tests { + t.Run(test.description, func(t *testing.T) { + test.requiredMocks() + + var message Message + read, err := conn.ReadMessage(&message) + + assert.Equal(t, test.expect.message, &message) + assert.Equal(t, test.expect.read, read) + assert.ErrorIs(t, err, test.expect.err) + }) + } +} + +func TestConnReadMessage_resize(t *testing.T) { + socket := new(mocks.Socket) + conn := NewConn(socket) + + type Expected struct { + message *Message + read int + err error + } + + tests := []struct { + description string + requiredMocks func() + expect Expected + }{ + { + description: "fail when socket reading fail", + requiredMocks: func() { + socket.On("Read", mock.AnythingOfType("[]uint8")).Return(0, errors.New("")).Once() + }, + expect: Expected{ + message: &Message{Data: new(json.RawMessage)}, + read: 0, + err: ErrConnReadMessageJSONInvalid, + }, + }, + { + description: "fail when data read is not a JSON object", + requiredMocks: func() { + socket.On("Read", mock.AnythingOfType("[]uint8")).Return(512, nil).Once() + }, + expect: Expected{ + message: &Message{Data: new(json.RawMessage)}, + read: 0, + 
err: ErrConnReadMessageJSONInvalid, + }, + }, + { + description: "success to read the message", + requiredMocks: func() { + socket.On("Read", mock.AnythingOfType("[]uint8")).Return(40, nil).Run(func(args mock.Arguments) { + b := args.Get(0).([]byte) + + buf, _ := json.Marshal(Message{ + Kind: messageKindResize, + Data: Dimensions{Cols: 100, Rows: 50}, + }) + + copy(b, buf) + }).Once() + }, + expect: Expected{ + message: func() *Message { + return &Message{ + Kind: messageKindResize, + Data: Dimensions{Cols: 100, Rows: 50}, + } + }(), + read: 40, + err: nil, + }, + }, + } + + for _, test := range tests { + t.Run(test.description, func(t *testing.T) { + test.requiredMocks() + + var message Message + read, err := conn.ReadMessage(&message) + + assert.Equal(t, test.expect.message, &message) + assert.Equal(t, test.expect.read, read) + assert.ErrorIs(t, err, test.expect.err) + }) + } +} diff --git a/ssh/web/errors.go b/ssh/web/errors.go new file mode 100644 index 00000000000..4b55cc2ab21 --- /dev/null +++ b/ssh/web/errors.go @@ -0,0 +1,61 @@ +package web + +import ( + "errors" + "fmt" +) + +var ( + ErrPublicKey = fmt.Errorf("failed to get the parsed public key") + ErrPrivateKey = fmt.Errorf("failed to get a key data from the server") + ErrSigner = fmt.Errorf("failed to create a signer from the private key") + ErrConnect = fmt.Errorf("failed to connect to device") + ErrSession = fmt.Errorf("failed to create a session between the server to the agent") + ErrGetAuth = fmt.Errorf("failed to get auth data from key") + ErrWebData = fmt.Errorf("failed to get the data to connect to device") + ErrFindDevice = fmt.Errorf("failed to find the device") + ErrFindPublicKey = fmt.Errorf("failed to get the public key from the server") + ErrEvaluatePublicKey = fmt.Errorf("failed to evaluate the public key in the server") + ErrForbiddenPublicKey = fmt.Errorf("failed to use the public key for this action") + ErrDataPublicKey = fmt.Errorf("failed to parse the public key data") + 
ErrSignaturePublicKey = fmt.Errorf("failed to decode the public key signature") + ErrVerifyPublicKey = fmt.Errorf("failed to verify the public key") + ErrSignerPublicKey = fmt.Errorf("failed to signer the public key") + ErrDialSSH = fmt.Errorf("failed to dial to connect to server") + ErrEnvIPAddress = fmt.Errorf("failed to set the env variable of ip address from client") + ErrEnvWS = fmt.Errorf("failed to set the env variable of web socket from client") + ErrPipe = fmt.Errorf("failed to pipe client data to agent") + ErrPty = fmt.Errorf("failed to request the pty to agent") + ErrShell = fmt.Errorf("failed to get the shell to agent") + ErrTarget = fmt.Errorf("failed to get client target") + ErrAuthentication = fmt.Errorf("failed to authenticate to device") + ErrEnvs = fmt.Errorf("failed to parse server envs") + ErrConfiguration = fmt.Errorf("failed to create communication configuration") + ErrInvalidVersion = fmt.Errorf("failed to parse device version") + ErrUnsuportedPublicKeyAuth = fmt.Errorf("connections using public keys are not permitted when the agent version is 0.5.x or earlier") +) + +var ( + ErrConnReadMessageSocketRead = errors.New("failed to read the message from socket") + ErrConnReadMessageSocketWrite = errors.New("failed to write the message's data to socket") + ErrConnReadMessageJSONInvalid = errors.New("failed to parse the message from json") + ErrConnReadMessageKindInvalid = errors.New("this kind of message is invalid") + ErrConnWriteMessageFailedFrame = errors.New("failed to create frame") + ErrConnReadMessageInputTooLong = errors.New("input is too long, maximum allowed is 4096 runes") +) + +var ( + ErrWebSocketGetToken = errors.New("failed to get the token from query") + ErrWebSocketGetDimensions = errors.New("failed to get terminal dimensions from query") + ErrWebSocketGetIP = errors.New("failed to get IP from query") +) + +var ErrBridgeCredentialsNotFound = errors.New("failed to find the credentials") + +var ( + ErrGetToken = errors.New("token 
not found on request query") + ErrGetIP = errors.New("ip not found on request query") + ErrGetDimensions = errors.New("failed to get a terminal dimension") +) + +var ErrCreditialsNoPassword = errors.New("this creditials does not have a password defined") diff --git a/ssh/web/handlers.go b/ssh/web/handlers.go deleted file mode 100644 index ba6af6ded24..00000000000 --- a/ssh/web/handlers.go +++ /dev/null @@ -1,117 +0,0 @@ -package web - -import ( - "context" - "encoding/json" - "errors" - "fmt" - "net/http" - - log "github.com/sirupsen/logrus" - "golang.org/x/net/websocket" -) - -type ( - functionHandleCreateSession func(ctx context.Context, data *Input) (*Session, error) - functionHandleRestoreSession func(ctx context.Context, data *Output) (*Session, error) -) - -// HandlerCreateSession handles a HTTP request with the data to create a new web session. -// -// It receives on request's body the device's UID and the device's username, either the device's password or the -// device's fingerprint and the device's signature, to returns a JWT token that can be used to connect to the device. -// The JWT token is generated using a UUID as payload, and encrypted using a runtime generated RSA private key. -// -// If a error occurs, it logs on the server the error and returns the error message and the HTTP status code related to -// the error to the user. 
-func HandlerCreateSession(create functionHandleCreateSession) func(http.ResponseWriter, *http.Request) { - type Request struct { - Device string `json:"device"` - Username string `json:"username"` - Password string `json:"password"` - Fingerprint string `json:"fingerprint"` - Signature string `json:"signature"` - } - - type Response struct { - Token string `json:"token"` - } - - success := func(req http.ResponseWriter, device, username, token string) { - log.WithFields(log.Fields{ - "device": device, - "username": username, - }).Info("session's token generated successfully") - - req.WriteHeader(http.StatusOK) - req.Header().Set("Content-Type", "application/json") - - json.NewEncoder(req).Encode(Response{Token: token}) //nolint: errcheck,errchkjson - } - - fail := func(response http.ResponseWriter, device, username string, status int, err error) { - log.WithError(err).WithFields(log.Fields{ - "device": device, - "username": username, - "status": status, - }).Error("failed to get the session's token") - - http.Error(response, err.Error(), status) - } - - return func(res http.ResponseWriter, req *http.Request) { - var request *Request - if err := json.NewDecoder(req.Body).Decode(&request); err != nil { - fail(res, "", "", http.StatusBadRequest, errors.New("failed to decode the request body")) - } - - data := &Input{ - Device: request.Device, - Username: request.Username, - Password: request.Password, - Fingerprint: request.Fingerprint, - Signature: request.Signature, - } - - session, err := create(req.Context(), data) - if err != nil { - fail(res, data.Device, data.Username, http.StatusInternalServerError, errors.New("failed to generate the session's token")) - } - - success(res, session.Device, session.Username, session.Token) - } -} - -// HandlerCreateSession handles a websocket request with the data to restore web session. -// -// It receives the session's token as a websocket's query parameter and verifies if the token is valid. 
If the token is -// valid, it calls the websocket handler to connect to the device. If the token is invalid, it returns an error message. -// -// If any other error occurs, it logs on the server the error and returns the error message and the error to the user. -func HandlerRestoreSession(restore functionHandleRestoreSession, handler func(socket *websocket.Conn, session *Session)) websocket.Handler { - return func(socket *websocket.Conn) { - get := func(socket *websocket.Conn, key string) (string, bool) { - value := socket.Request().URL.Query().Get(key) - - return value, value != "" - } - - fail := func(socket *websocket.Conn, internal, external error) { - log.Error(internal.Error()) - - socket.Write([]byte(fmt.Sprintf("%s\n", external.Error()))) //nolint: errcheck - } - - token, ok := get(socket, "token") - if !ok { - fail(socket, errors.New("failed to get the token from the websocket"), errors.New("failed to get the token from the websocket")) - } - - session, err := restore(socket.Request().Context(), &Output{Token: token}) - if err != nil { - fail(socket, err, errors.New("failed to get the session")) - } - - handler(socket, session) - } -} diff --git a/ssh/web/manager.go b/ssh/web/manager.go new file mode 100644 index 00000000000..7671da27534 --- /dev/null +++ b/ssh/web/manager.go @@ -0,0 +1,42 @@ +package web + +import ( + "sync" + "time" +) + +// manager is used to store credentials for a time period. +type manager struct { + // ttl is the time that each credial live in the map. + ttl time.Duration + credentials *sync.Map +} + +// newManager creates a new [Manager] to store the credentials for a time period. +func newManager(ttl time.Duration) *manager { + return &manager{ + ttl: ttl, + credentials: new(sync.Map), + } +} + +// save credentials for a time period. After this, the credentials are deleted. 
+func (m *manager) save(id string, data *Credentials) { + m.credentials.Store(id, data) + + go time.AfterFunc(m.ttl, func() { + m.credentials.Delete(id) + }) +} + +// get gets the credentials if its time period has not ended. +func (m *manager) get(id string) (*Credentials, bool) { + l, ok := m.credentials.Load(id) + if !ok { + return nil, false + } + + v, ok := l.(*Credentials) + + return v, ok +} diff --git a/ssh/web/manager_test.go b/ssh/web/manager_test.go new file mode 100644 index 00000000000..5d7683c6124 --- /dev/null +++ b/ssh/web/manager_test.go @@ -0,0 +1,50 @@ +package web + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestManagerSave(t *testing.T) { + tests := []struct { + description string + id string + waitFor time.Duration + data *Credentials + }{ + { + description: "insert credential on manager and delete after 1 second", + id: "foo", + waitFor: 1 * time.Second, + data: nil, + }, + { + description: "insert credential on manager and delete after 2 seconds", + id: "bar", + waitFor: 2 * time.Second, + data: nil, + }, + } + + for _, ts := range tests { + test := ts + + t.Run(test.description, func(t *testing.T) { + t.Parallel() + + manager := newManager(test.waitFor) + manager.save(test.id, nil) + + assert.EventuallyWithT(t, func(tt *assert.CollectT) { + _, ok := manager.get(test.id) + assert.False(tt, ok) + + // NOTICE: we are waiting for two times the defined time, verifying every 10 milliseconds whether the condition + // is met. It means that the wait time multiplied by two is the max time, but the condition can be met before + // its end. + }, 2*test.waitFor, 10*time.Millisecond) + }) + } +} diff --git a/ssh/web/messages.go b/ssh/web/messages.go new file mode 100644 index 00000000000..b8de3950400 --- /dev/null +++ b/ssh/web/messages.go @@ -0,0 +1,28 @@ +package web + +type messageKind uint8 + +const ( + // messageKindInput is the identifier to an input message. This kind of message can be directly sent to [web.Conn].
+ messageKindInput messageKind = iota + 1 + // messageKindResize is the identifier to a resize request message. This kind of message contains the number of + // columns and rows that the terminal should have. + messageKindResize + // messageKindSignature is the identifier to a signature message. This kind of message contains the data to be + // signed by the user's private key. + messageKindSignature + // messageKindError is the identifier to output an error message. This kind of message contains data to be shown + // in the terminal for informational purposes. + messageKindError +) + +// MessageMinSize is the minimum size of a message in bytes. This is used to validate if the message is valid. +const MessageMinSize = 20 + +// Message is the structure used to send and receive messages through the [web.Conn]. +// +// A message min size could match with [MessageMinSize] constant, which is the size of the JSON object without data. +type Message struct { + Kind messageKind `json:"kind"` + Data any `json:"data"` +} diff --git a/ssh/web/messages_test.go b/ssh/web/messages_test.go new file mode 100644 index 00000000000..e11e01964d5 --- /dev/null +++ b/ssh/web/messages_test.go @@ -0,0 +1,29 @@ +package web + +import ( + "encoding/json" + "fmt" + "testing" +) + +func TestMessageMinSize(t *testing.T) { + msg := Message{ + Kind: 0, + Data: "", + } + j, _ := json.Marshal(msg) + + if len(j) != MessageMinSize { + t.Errorf("expected %d, got %d", MessageMinSize, len(j)) + } +} + +func ExampleMessage() { + msg := Message{ + Kind: 0, + Data: "", + } + j, _ := json.Marshal(msg) + + fmt.Println(string(j)) // Output: {"kind":0,"data":""} +} diff --git a/ssh/web/mocks/socket.go b/ssh/web/mocks/socket.go new file mode 100644 index 00000000000..8b911c1e4fc --- /dev/null +++ b/ssh/web/mocks/socket.go @@ -0,0 +1,98 @@ +// Code generated by mockery v2.53.3. DO NOT EDIT.
+ +package mocks + +import mock "github.com/stretchr/testify/mock" + +// Socket is an autogenerated mock type for the Socket type +type Socket struct { + mock.Mock +} + +// Close provides a mock function with no fields +func (_m *Socket) Close() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Close") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Read provides a mock function with given fields: p +func (_m *Socket) Read(p []byte) (int, error) { + ret := _m.Called(p) + + if len(ret) == 0 { + panic("no return value specified for Read") + } + + var r0 int + var r1 error + if rf, ok := ret.Get(0).(func([]byte) (int, error)); ok { + return rf(p) + } + if rf, ok := ret.Get(0).(func([]byte) int); ok { + r0 = rf(p) + } else { + r0 = ret.Get(0).(int) + } + + if rf, ok := ret.Get(1).(func([]byte) error); ok { + r1 = rf(p) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Write provides a mock function with given fields: p +func (_m *Socket) Write(p []byte) (int, error) { + ret := _m.Called(p) + + if len(ret) == 0 { + panic("no return value specified for Write") + } + + var r0 int + var r1 error + if rf, ok := ret.Get(0).(func([]byte) (int, error)); ok { + return rf(p) + } + if rf, ok := ret.Get(0).(func([]byte) int); ok { + r0 = rf(p) + } else { + r0 = ret.Get(0).(int) + } + + if rf, ok := ret.Get(1).(func([]byte) error); ok { + r1 = rf(p) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// NewSocket creates a new instance of Socket. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewSocket(t interface { + mock.TestingT + Cleanup(func()) +}) *Socket { + mock := &Socket{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/ssh/web/pkg/cache/cache.go b/ssh/web/pkg/cache/cache.go deleted file mode 100644 index 1eb7af64099..00000000000 --- a/ssh/web/pkg/cache/cache.go +++ /dev/null @@ -1,109 +0,0 @@ -// Package cache provides a interface to store and retrieve session's data from a cache. -package cache - -import ( - "context" - "errors" - "time" - - "github.com/shellhub-io/shellhub/pkg/cache" - "github.com/shellhub-io/shellhub/ssh/web/pkg/token" -) - -// instance is a singleton cache instance. -var instance cache.Cache - -// TTL is the time to live of the token in the cache. -const TTL = time.Second * 30 - -// Data is the data set to be saved in the cache. -type Data struct { - // Device is the device's name. - Device string - // Username is the username of the user to login. - Username string - // Password is the password of the user to login. - // Password is should be empty if the user is using a public key. - Password string - // Fingerprint is the fingerprint of the public key. - // Fingerprint is should be empty if the user is using a password. - Fingerprint string - // Signature is the signature of the public key. - // Signature is should be empty if the user is using a password. - Signature string -} - -type Token struct { - // ID is the token's identifier. - ID string - // Token is the JWT token. - Token string -} - -// ConnectRedis connects to redis to be used as cache system. 
-func ConnectRedis(uri string) error { - if instance == nil { - var err error - instance, err = cache.NewRedisCache(uri) - if err != nil { - return err - } - - return nil - } - - return nil -} - -func getConnection() (cache.Cache, error) { //nolint: ireturn - if instance == nil { - return nil, errors.New("cache was not connected") - } - - return instance, nil -} - -// Save saves a data set for TTL time using token as identifier. -func Save(ctx context.Context, token *token.Token, data *Data) (*Token, error) { - connection, err := getConnection() - if err != nil { - return nil, err - } - - if err := connection.Set(ctx, token.ID, data, TTL); err != nil { - return nil, err - } - - return &Token{ - ID: token.ID, - Token: token.Data, - }, nil -} - -// Restore restores a data set using token as identifier. -func Restore(ctx context.Context, token *token.Token) (*Data, error) { - connection, err := getConnection() - if err != nil { - return nil, err - } - - var value struct { - Device string - Username string - Password string - Fingerprint string - Signature string - } - - if err := connection.Get(ctx, token.ID, &value); err != nil { - return nil, err - } - - return &Data{ - Device: value.Device, - Username: value.Username, - Password: value.Password, - Fingerprint: value.Fingerprint, - Signature: value.Signature, - }, nil -} diff --git a/ssh/web/pkg/token/token.go b/ssh/web/pkg/token/token.go index 158f56a73d9..48d56d49a0d 100644 --- a/ssh/web/pkg/token/token.go +++ b/ssh/web/pkg/token/token.go @@ -5,7 +5,7 @@ import ( "crypto/rsa" "fmt" - "github.com/golang-jwt/jwt" + "github.com/golang-jwt/jwt/v5" "github.com/shellhub-io/shellhub/pkg/uuid" "github.com/shellhub-io/shellhub/ssh/pkg/magickey" ) @@ -25,7 +25,7 @@ func NewToken(_ *rsa.PrivateKey) (*Token, error) { token, err := jwt.NewWithClaims(jwt.SigningMethodRS256, jwt.MapClaims{ "id": identifier, - }).SignedString(magickey.GetRerefence()) + }).SignedString(magickey.GetReference()) if err != nil { return nil, err } @@ 
-41,7 +41,7 @@ func Parse(token string) (*Token, error) { return nil, fmt.Errorf("unexpected method: %s", jwtToken.Header["alg"]) } - return magickey.GetRerefence().Public().(*rsa.PublicKey), nil + return magickey.GetReference().Public().(*rsa.PublicKey), nil }); err != nil { return nil, err } diff --git a/ssh/web/session.go b/ssh/web/session.go new file mode 100644 index 00000000000..03d48892b6d --- /dev/null +++ b/ssh/web/session.go @@ -0,0 +1,342 @@ +package web + +import ( + "bytes" + "context" + "encoding/base64" + "errors" + "fmt" + "io" + "time" + "unicode/utf8" + + "github.com/shellhub-io/shellhub/pkg/api/internalclient" + "github.com/shellhub-io/shellhub/pkg/cache" + "github.com/shellhub-io/shellhub/pkg/uuid" + log "github.com/sirupsen/logrus" + "golang.org/x/crypto/ssh" +) + +type BannerError struct { + Message string +} + +func NewBannerError(message string) *BannerError { + return &BannerError{ + Message: message, + } +} + +func (b *BannerError) Error() string { + return b.Message +} + +// getAuth gets the authentication methods from credentials. +func getAuth(ctx context.Context, conn *Conn, creds *Credentials) ([]ssh.AuthMethod, error) { + if creds.isPassword() { + return []ssh.AuthMethod{ssh.Password(creds.Password)}, nil + } + + cli, err := internalclient.NewClient(nil) + if err != nil { + return nil, err + } + + // Trys to get a device from the API. + device, err := cli.GetDevice(ctx, creds.Device) + if err != nil { + return nil, ErrFindDevice + } + + // Trys to get a public key from the API. + key, err := cli.GetPublicKey(ctx, creds.Fingerprint, device.TenantID) + if err != nil { + return nil, ErrFindPublicKey + } + + // Trys to evaluate the public key from the API. 
+ ok, err := cli.EvaluateKey(ctx, creds.Fingerprint, device, creds.Username) + if err != nil { + return nil, ErrEvaluatePublicKey + } + + if !ok { + return nil, ErrForbiddenPublicKey + } + + pubKey, _, _, _, err := ssh.ParseAuthorizedKey(key.Data) //nolint: dogsled + if err != nil { + return nil, ErrDataPublicKey + } + + signer := &Signer{ + conn: conn, + publicKey: &pubKey, + } + + return []ssh.AuthMethod{ssh.PublicKeys(signer)}, nil +} + +type Signer struct { + conn *Conn + publicKey *ssh.PublicKey +} + +func (s *Signer) PublicKey() ssh.PublicKey { + return *s.publicKey +} + +func (s *Signer) Sign(rand io.Reader, data []byte) (*ssh.Signature, error) { + dataB64 := base64.StdEncoding.EncodeToString(data) + if _, err := s.conn.WriteMessage(&Message{Kind: messageKindSignature, Data: dataB64}); err != nil { + return nil, err + } + + var msg Message + if _, err := s.conn.ReadMessage(&msg); err != nil { + return nil, fmt.Errorf("invalid signature response") + } + + signed, ok := msg.Data.(string) + if !ok { + return nil, fmt.Errorf("data isn't a signed string") + } + + blob, err := base64.StdEncoding.DecodeString(signed) + if err != nil { + return nil, err + } + + return &ssh.Signature{ + Format: s.PublicKey().Type(), + Blob: blob, + }, nil +} + +func newSession(ctx context.Context, cache cache.Cache, conn *Conn, creds *Credentials, dim Dimensions, info Info) error { + logger := log.WithFields(log.Fields{ + "user": creds.Username, + "device": creds.Device, + "cols": dim.Cols, + "rows": dim.Rows, + "ip": info.IP, + }) + + logger.Info("handling web client request started") + + defer logger.Info("handling web client request end") + + uuid := uuid.Generate() + + user := fmt.Sprintf("%s@%s", creds.Username, uuid) + auth, err := getAuth(ctx, conn, creds) + if err != nil { + logger.WithError(err).Debug("failed to get the credentials") + + return ErrGetAuth + } + + if err := cache.Set(ctx, "web-ip/"+user, fmt.Sprintf("%s:%s", creds.Device, info.IP), 1*time.Minute); err != nil 
{ + logger.WithError(err).Debug("failed to set the session IP on the cache") + + return err + } + + defer cache.Delete(ctx, "web-ip/"+user) //nolint:errcheck + + connection, err := ssh.Dial("tcp", "localhost:2222", &ssh.ClientConfig{ //nolint: exhaustruct + User: user, + Auth: auth, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + BannerCallback: func(message string) error { + if message != "" { + return NewBannerError(message) + } + + return nil + }, + }) + if err != nil { + var e *BannerError + + // NOTE: if the connection return a error banner, wrap that message into an error and return to the session. + if errors.As(err, &e) { + logger.WithError(e).Debug("failed to receive the connection banner") + + return e + } + + // NOTE: Otherwise, any other error from the [ssh.Dial] process, we assume it was an authentication error, + // keeping the real error internally to avoid exposing some sensitive data. + logger.WithError(err).Debug("failed to dial to the ssh server") + + return ErrAuthentication + } + + defer connection.Close() + + agent, err := connection.NewSession() + if err != nil { + logger.WithError(err).Debug("failed to create a new session") + + return ErrSession + } + + defer agent.Close() + + stdin, err := agent.StdinPipe() + if err != nil { + logger.WithError(err).Debug("failed to create the stdin pipe") + + return err + } + + stdout, err := agent.StdoutPipe() + if err != nil { + logger.WithError(err).Debug("failed to create the stdout pipe") + + return err + } + + stderr, err := agent.StderrPipe() + if err != nil { + logger.WithError(err).Debug("failed to create the stderr pipe") + + return err + } + + if err := agent.RequestPty("xterm", int(dim.Rows), int(dim.Cols), ssh.TerminalModes{ + ssh.ECHO: 1, + ssh.TTY_OP_ISPEED: 14400, + ssh.TTY_OP_OSPEED: 14400, + }); err != nil { + logger.WithError(err).Debug("failed to request the pty on session") + + return ErrPty + } + + if err := agent.Shell(); err != nil { + 
logger.WithError(err).Debug("failed to request the shell on session") + + return ErrShell + } + + go func() { + defer agent.Close() + + for { + var message Message + + if _, err := conn.ReadMessage(&message); err != nil { + if errors.Is(err, io.EOF) { + return + } + + logger.WithError(err).Error("failed to read the message from the client") + + return + } + + switch message.Kind { + case messageKindInput: + buffer := message.Data.(string) + + if _, err := stdin.Write([]byte(buffer)); err != nil { + logger.WithError(err).Error("failed to write the message data on the SSH session") + + return + } + case messageKindResize: + dim := message.Data.(Dimensions) + + if err := agent.WindowChange(int(dim.Rows), int(dim.Cols)); err != nil { + logger.WithError(err).Error("failed to change the size of window for terminal session") + + return + } + } + } + }() + + go redirToWs(stdout, conn) // nolint:errcheck + go io.Copy(conn, stderr) //nolint:errcheck + + if err := agent.Wait(); err != nil { + logger.WithError(err).Warning("client remote command returned a error") + } + + return nil +} + +func redirToWs(rd io.Reader, ws *Conn) error { + // TODO: Evaluate refactoring this function to improve its readability. + var buf [32 * 1024]byte + var start, end, buflen int + + for { + nr, err := rd.Read(buf[start:]) + if err != nil { + return err + } + + if nr == 0 { + // NOTE: "Callers should treat a return of 0 and nil as indicating that nothing happened; in particular it + // does not indicate EOF", in such a case, the caller should not interpret it as EOF, but instead wait for + // more data. 
+ // + // https://pkg.go.dev/io#Reader + continue + } + + buflen = start + nr + + for end = buflen - 1; end >= 0; end-- { + if utf8.RuneStart(buf[end]) { + ch, width := utf8.DecodeRune(buf[end:buflen]) + if ch != utf8.RuneError { + end += width + } + + break + } + + if buflen-end >= 6 { + end = nr + + break + } + } + + if end < 0 { + // NOTE: This workround is to avoid a panic in case the end is negative, which would lead to a negative slice. + // This situation can happen when the buffer contains only UTF-8 continuation bytes, which are bytes that + // cannot start a valid UTF-8 rune. In such cases, the loop above will not find a valid rune start and + // will leave `end` as -1. + // + // https://datatracker.ietf.org/doc/html/rfc3629#section-3 + log.WithFields(log.Fields{ + "buf": buf, + "buflen": buflen, + "start": start, + "end": end, + "nr": nr, + }).Warn("end is negative, skipping write to avoid panic") + + end = 0 + } + + if _, err = ws.WriteBinary([]byte(string(bytes.Runes(buf[0:end])))); err != nil { + return err + } + + start = buflen - end + + if start > 0 { + // copy remaning read bytes from the end to the beginning of a buffer + // so that we will get normal bytes + for i := 0; i < start; i++ { + buf[i] = buf[end+i] + } + } + } +} diff --git a/ssh/web/session_test.go b/ssh/web/session_test.go new file mode 100644 index 00000000000..558bb6f1a1f --- /dev/null +++ b/ssh/web/session_test.go @@ -0,0 +1,63 @@ +package web + +import ( + "io" + "testing" + "testing/iotest" + + "github.com/shellhub-io/shellhub/ssh/web/mocks" + "github.com/stretchr/testify/assert" +) + +type zeroReadNoEOFReader struct{} + +func (r *zeroReadNoEOFReader) Read(p []byte) (int, error) { + return 0, nil +} + +// singleRead returns the provided bytes on the first Read call, then EOF. 
+type singleRead struct { + data []byte + read bool +} + +func (r *singleRead) Read(p []byte) (int, error) { + if r.read { + return 0, io.EOF + } + + n := copy(p, r.data) + r.read = true + + return n, nil +} + +func TestRedirToWs_Regression_EndNegative(t *testing.T) { + mock := mocks.NewSocket(t) + mock.On("Write", []byte{}).Return(0, nil).Once() + + conn := NewConn(mock) + + // All three bytes are UTF-8 continuation bytes, which will cause the + // logic in redirToWs to set end to -1 if not handled properly. + // This test ensures that the function does not panic in such a case. + // + // https://datatracker.ietf.org/doc/html/rfc3629#section-3 + reader := &singleRead{data: []byte{0x80, 0x81, 0x82}} + + assert.NotPanics(t, func() { + _ = redirToWs(reader, conn) + }, "expected redirToWs to panic when end is -1 and negative slice is attempted") +} + +func TestRedirToWs_Regression_ZeroReadThenEOF(t *testing.T) { + conn := &Conn{ + Socket: mocks.NewSocket(t), + } + + reader := iotest.TimeoutReader(&zeroReadNoEOFReader{}) + + assert.NotPanics(t, func() { + _ = redirToWs(reader, conn) + }, "expected redirToWs to handle zero read without panicking") +} diff --git a/ssh/web/utils.go b/ssh/web/utils.go new file mode 100644 index 00000000000..6fab6eef4f8 --- /dev/null +++ b/ssh/web/utils.go @@ -0,0 +1,74 @@ +package web + +import ( + "crypto/rand" + "crypto/rsa" + "crypto/sha256" + "encoding/hex" + "errors" +) + +type Credentials struct { + // Device is the device what the session is open. + Device string `json:"device"` + // Username is the username in the device's OS. + Username string `json:"username"` + // Password is the password in the device's OS. + Password string `json:"password"` + // Fingerprint is the identifier of the public key used in the device's OS. 
+ Fingerprint string `json:"fingerprint"` +} + +func (c *Credentials) encryptPassword(key *rsa.PrivateKey) error { + if c.Password == "" { + return ErrCreditialsNoPassword + } + + signed, err := rsa.EncryptOAEP(sha256.New(), rand.Reader, &key.PublicKey, []byte(c.Password), nil) + if err != nil { + return errors.New("failed to sign the session's password") + } + + c.Password = hex.EncodeToString(signed) + + return nil +} + +func (c *Credentials) decryptPassword(key *rsa.PrivateKey) error { + if c.Password == "" { + return ErrCreditialsNoPassword + } + + decoded, err := hex.DecodeString(c.Password) + if err != nil { + return errors.New("failed to decode the session's password") + } + + decrypted, err := rsa.DecryptOAEP(sha256.New(), rand.Reader, key, decoded, nil) + if err != nil { + return errors.New("failed to decrypt the session's password") + } + + c.Password = string(decrypted) + + return nil +} + +func (c *Credentials) isPublicKey() bool { // nolint: unused + return c.Fingerprint != "" +} + +// isPassword checks if connection is using password method. +func (c *Credentials) isPassword() bool { + return !c.isPublicKey() +} + +// Dimensions represents a web SSH terminal dimensions. 
+type Dimensions struct { + Cols uint32 `json:"cols"` + Rows uint32 `json:"rows"` +} + +type Info struct { + IP string `json:"ip"` +} diff --git a/ssh/web/web.go b/ssh/web/web.go index 359a7f168c4..e7d18109407 100644 --- a/ssh/web/web.go +++ b/ssh/web/web.go @@ -1,120 +1,129 @@ package web import ( - "context" - "crypto/rand" - "crypto/rsa" - "crypto/sha256" - "encoding/hex" - "errors" + "encoding/json" + "net/http" + "time" + "github.com/labstack/echo/v4" + "github.com/shellhub-io/shellhub/pkg/cache" "github.com/shellhub-io/shellhub/ssh/pkg/magickey" - "github.com/shellhub-io/shellhub/ssh/web/pkg/cache" "github.com/shellhub-io/shellhub/ssh/web/pkg/token" + log "github.com/sirupsen/logrus" + "golang.org/x/net/websocket" ) -type Input struct { - Device string - Username string - Password string - Fingerprint string - Signature string -} +// NewSSHServerBridge creates routes into a [echo.Router] to connect a webscoket to SSH using Shell session. +func NewSSHServerBridge(router *echo.Echo, cache cache.Cache) { + const WebsocketSSHBridgeRoute = "/ws/ssh" -type Output struct { - Token string -} + manager := newManager(30 * time.Second) -type Session struct { - Token string - Device string - Username string - Password string - Fingerprint string - Signature string -} + // NOTICE: this is the route that users send your credentials securely. + router.Add(http.MethodPost, WebsocketSSHBridgeRoute, echo.WrapHandler( + http.HandlerFunc(func(res http.ResponseWriter, req *http.Request) { + type Success struct { + Token string `json:"token"` + } -// CreateSession creates a new web session. 
-func CreateSession(ctx context.Context, data *Input) (*Session, error) { - if data == nil { - return nil, errors.New("failed to get the session's data") - } + type Fail struct { + Error string `json:"error"` + } - key := magickey.GetRerefence() + decoder := json.NewDecoder(req.Body) + encoder := json.NewEncoder(res) - token, err := token.NewToken(key) - if err != nil { - return nil, errors.New("failed to generate the session's token") - } + response := func(res http.ResponseWriter, status int, data any) { + res.WriteHeader(status) + res.Header().Set("Content-Type", "application/json") - if data.Password != "" { - signed, err := rsa.EncryptOAEP(sha256.New(), rand.Reader, &key.PublicKey, []byte(data.Password), nil) - if err != nil { - return nil, errors.New("failed to sign the session's password") - } + encoder.Encode(data) //nolint: errcheck,errchkjson + } - data.Password = hex.EncodeToString(signed) - } - - cached, err := cache.Save(ctx, token, &cache.Data{ - Device: data.Device, - Username: data.Username, - Password: data.Password, - Fingerprint: data.Fingerprint, - Signature: data.Signature, - }) - if err != nil { - return nil, errors.New("failed to cache the session's token") - } - - return &Session{ - Token: cached.Token, - Device: data.Device, - Username: data.Username, - Password: data.Password, - Fingerprint: data.Fingerprint, - Signature: data.Signature, - }, nil -} + var request Credentials + if err := decoder.Decode(&request); err != nil { + response(res, http.StatusBadRequest, Fail{Error: err.Error()}) + + return + } -// RestoreSession restores a web session. 
-func RestoreSession(ctx context.Context, data *Output) (*Session, error) { - if data == nil { - return nil, errors.New("failed to get the session's token") - } + key := magickey.GetReference() - key := magickey.GetRerefence() + token, err := token.NewToken(key) + if err != nil { + response(res, http.StatusBadRequest, Fail{Error: err.Error()}) - token, err := token.Parse(data.Token) - if err != nil { - return nil, errors.New("invalid session's token") - } + return + } - cached, err := cache.Restore(ctx, token) - if err != nil { - return nil, errors.New("failed to get credentials to login") - } + request.encryptPassword(key) //nolint:errcheck + + // NOTICE: saved credentials are delete after a time period. + manager.save(token.ID, &request) + + response(res, http.StatusOK, Success{Token: token.ID}) + })), + ) + + router.Add(http.MethodGet, WebsocketSSHBridgeRoute, echo.WrapHandler(websocket.Handler(func(wsconn *websocket.Conn) { + defer wsconn.Close() + + // exit sends the error's message to the client on the browser. 
+ exit := func(wsconn *websocket.Conn, err error) { + buffer, err := json.Marshal(Message{ + Kind: messageKindError, + Data: err.Error(), + }) + + log.WithError(err).Error("failed to parsing the error message on web terminal") + + wsconn.Write(buffer) //nolint:errcheck + } - if cached.Password != "" { - decoded, err := hex.DecodeString(cached.Password) + token, err := getToken(wsconn.Request()) if err != nil { - return nil, errors.New("failed to decode the session's password") + exit(wsconn, ErrWebSocketGetToken) + + return } - decrypted, err := rsa.DecryptOAEP(sha256.New(), rand.Reader, key, decoded, nil) + cols, rows, err := getDimensions(wsconn.Request()) if err != nil { - return nil, errors.New("failed to decrypt the session's password") + exit(wsconn, ErrWebSocketGetDimensions) + + return } - cached.Password = string(decrypted) - } - - return &Session{ - Token: data.Token, - Device: cached.Device, - Username: cached.Username, - Password: cached.Password, - Fingerprint: cached.Fingerprint, - Signature: cached.Signature, - }, nil + ip, err := getIP(wsconn.Request()) + if err != nil { + exit(wsconn, ErrWebSocketGetIP) + + return + } + + creds, ok := manager.get(token) + if !ok { + exit(wsconn, ErrBridgeCredentialsNotFound) + } + + conn := NewConn(wsconn) + defer conn.Close() + + go conn.KeepAlive() + + creds.decryptPassword(magickey.GetReference()) //nolint:errcheck + + if err := newSession( + wsconn.Request().Context(), + cache, + conn, + creds, + Dimensions{cols, rows}, + Info{IP: ip}, + ); err != nil { + exit(wsconn, err) + + return + } + }))) } diff --git a/ssh/web/websocket.go b/ssh/web/websocket.go new file mode 100644 index 00000000000..76a98fb55f8 --- /dev/null +++ b/ssh/web/websocket.go @@ -0,0 +1,50 @@ +package web + +import ( + "errors" + "net/http" + "strconv" +) + +func getToken(req *http.Request) (string, error) { + token := req.URL.Query().Get("token") + + if token == "" { + return "", ErrGetToken + } + + return token, nil +} + +func 
getDimensions(req *http.Request) (uint32, uint32, error) { + toUint32 := func(text string) (uint64, error) { + integer, err := strconv.ParseUint(text, 10, 32) + if err != nil { + return 0, err + } + + return integer, nil + } + + cols, err := toUint32(req.URL.Query().Get("cols")) + if err != nil { + return 0, 0, errors.Join(ErrGetDimensions, err) + } + + rows, err := toUint32(req.URL.Query().Get("rows")) + if err != nil { + return 0, 0, errors.Join(ErrGetDimensions, err) + } + + //nolint: gosec // cols and rows are uint32, so we can safely convert them. + return uint32(cols), uint32(rows), nil +} + +func getIP(req *http.Request) (string, error) { + ip := req.Header.Get("X-Real-Ip") + if ip == "" { + return "", ErrGetIP + } + + return ip, nil +} diff --git a/ssh/web/websocket_test.go b/ssh/web/websocket_test.go new file mode 100644 index 00000000000..1b94a1f8c0c --- /dev/null +++ b/ssh/web/websocket_test.go @@ -0,0 +1,224 @@ +package web + +import ( + "math" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestGetToken(t *testing.T) { + type Expected struct { + token string + err error + } + + tests := []struct { + description string + uri string + expected Expected + }{ + { + description: "fail when token is not set", + uri: "http://localhost", + expected: Expected{ + token: "", + err: ErrGetToken, + }, + }, + { + description: "fail when token is empty on query", + uri: "http://localhost?token=", + expected: Expected{ + token: "", + err: ErrGetToken, + }, + }, + { + description: "success to get the token from query", + uri: "http://localhost?token=foo", + expected: Expected{ + token: "foo", + err: nil, + }, + }, + } + + for _, test := range tests { + t.Run(test.description, func(t *testing.T) { + req, _ := http.NewRequest("", test.uri, nil) + + token, err := getToken(req) + + assert.Equal(t, test.expected.token, token) + assert.ErrorIs(t, err, test.expected.err) + }) + } +} + +func TestGetDimensions(t *testing.T) { + type Expected struct 
{ + cols uint32 + rows uint32 + err error + } + + tests := []struct { + description string + uri string + expected Expected + }{ + { + description: "fail when cols and rows is not set", + uri: "http://localhost", + expected: Expected{ + cols: 0, + rows: 0, + err: ErrGetDimensions, + }, + }, + { + description: "fail when cols is set but rows do not", + uri: "http://localhost?cols=100", + expected: Expected{ + cols: 0, + rows: 0, + err: ErrGetDimensions, + }, + }, + { + description: "fail when rows is set but cols do not", + uri: "http://localhost?rows=100", + expected: Expected{ + cols: 0, + rows: 0, + err: ErrGetDimensions, + }, + }, + { + description: "fail when cols and rows are set, but empty", + uri: "http://localhost?cols=&rows=", + expected: Expected{ + cols: 0, + rows: 0, + err: ErrGetDimensions, + }, + }, + { + description: "fail when cols is a negative value", + uri: "http://localhost?cols=-100&rows=100", + expected: Expected{ + cols: 0, + rows: 0, + err: ErrGetDimensions, + }, + }, + { + description: "fail when rows is a negative value", + uri: "http://localhost?cols=100&rows=-100", + expected: Expected{ + cols: 0, + rows: 0, + err: ErrGetDimensions, + }, + }, + { + description: "fail when cols or rows exceed the uint32 limit", + uri: "http://localhost?cols=4294967296&rows=4294967296", + expected: Expected{ + cols: 0, + rows: 0, + err: ErrGetDimensions, + }, + }, + { + description: "success to get the cols and rows uint32 limit", + uri: "http://localhost?cols=4294967295&rows=4294967295", + expected: Expected{ + cols: math.MaxUint32, + rows: math.MaxUint32, + err: nil, + }, + }, + { + description: "success to get the cols and rows", + uri: "http://localhost?cols=100&rows=100", + expected: Expected{ + cols: 100, + rows: 100, + err: nil, + }, + }, + } + + for _, test := range tests { + t.Run(test.description, func(t *testing.T) { + req, _ := http.NewRequest("", test.uri, nil) + + cols, rows, err := getDimensions(req) + + assert.Equal(t, test.expected.cols, 
cols) + assert.Equal(t, test.expected.rows, rows) + + assert.ErrorIs(t, err, test.expected.err) + }) + } +} + +func TestGetIP(t *testing.T) { + type Expected struct { + ip string + err error + } + + tests := []struct { + description string + req *http.Request + expected Expected + }{ + { + description: "fail when IP is not set", + req: &http.Request{ + Header: http.Header{}, + }, + expected: Expected{ + ip: "", + err: ErrGetIP, + }, + }, + { + description: "fail when IP is empty on query", + req: &http.Request{ + Header: map[string][]string{ + "X-Real-Ip": {}, + }, + }, + expected: Expected{ + ip: "", + err: ErrGetIP, + }, + }, + { + description: "success to get the IP from query", + req: &http.Request{ + Header: map[string][]string{ + "X-Real-Ip": {"192.168.1.1"}, + }, + }, + expected: Expected{ + ip: "192.168.1.1", + err: nil, + }, + }, + } + + for _, test := range tests { + t.Run(test.description, func(t *testing.T) { + ip, err := getIP(test.req) + + assert.Equal(t, test.expected.ip, ip) + assert.ErrorIs(t, err, test.expected.err) + }) + } +} diff --git a/tests/environment/configurator.go b/tests/environment/configurator.go new file mode 100644 index 00000000000..d179817c770 --- /dev/null +++ b/tests/environment/configurator.go @@ -0,0 +1,142 @@ +package environment + +import ( + "context" + "io" + "log" + "sync" + "testing" + + "github.com/go-resty/resty/v2" + "github.com/joho/godotenv" + "github.com/shellhub-io/shellhub/pkg/uuid" + "github.com/stretchr/testify/assert" + tc "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/modules/compose" +) + +type DockerComposeConfigurator struct { + envs map[string]string + t *testing.T + mu *sync.Mutex +} + +// New creates a new [DockerComposeConfigurator]. By default, it reads from the .env file, but +// it assigns random values for ports and network to avoid collision errors. Use +// [DockerComposeConfigurator.Up] to build the instance, initiating a [DockerCompose] instance. 
+func New(t *testing.T) *DockerComposeConfigurator { + envs, err := godotenv.Read("../.env") + if !assert.NoError(t, err) { + assert.FailNow(t, err.Error()) + } + + envs["SHELLHUB_HTTP_PORT"] = GetFreePort(t) + envs["SHELLHUB_SSH_PORT"] = GetFreePort(t) + envs["SHELLHUB_NETWORK"] = "shellhub_network_" + uuid.Generate() + envs["SHELLHUB_LOG_LEVEL"] = "trace" + + return &DockerComposeConfigurator{ + envs: envs, + t: t, + mu: new(sync.Mutex), + } +} + +// WithEnv sets an environment variable with the specified key and value. +func (dcc *DockerComposeConfigurator) WithEnv(key, val string) *DockerComposeConfigurator { + dcc.envs[key] = val + + return dcc +} + +// WithEnvs sets multiple environment variables. +func (dcc *DockerComposeConfigurator) WithEnvs(envs map[string]string) *DockerComposeConfigurator { + for k, v := range envs { + dcc.WithEnv(k, v) + } + + return dcc +} + +// Clone clones a [DockerComposeConfigurator] instance, automatically assigning random ports +// and network to available services. The new instance will use the provided testing.T. +// +// It returns a pointer to the newly cloned struct, calling assert.FailNow if an error +// arises. +func (dcc *DockerComposeConfigurator) Clone(t *testing.T) *DockerComposeConfigurator { + clonedEnv := &DockerComposeConfigurator{ + envs: make(map[string]string), + t: t, + } + + for k, v := range dcc.envs { + clonedEnv.envs[k] = v + } + + dcc.mu.Lock() + clonedEnv.envs["SHELLHUB_HTTP_PORT"] = GetFreePort(t) + clonedEnv.envs["SHELLHUB_SSH_PORT"] = GetFreePort(t) + clonedEnv.envs["SHELLHUB_NETWORK"] = "shellhub_network_" + uuid.Generate() + dcc.mu.Unlock() + + return clonedEnv +} + +// Up initiates the ShellHub instance, blocking until all services are in the running or +// healthy state. +// +// It returns a [DockerCompose], which is a ShellHub Docker environment, calling +// [assert.FailNow] if an error arises. 
+func (dcc *DockerComposeConfigurator) Up(ctx context.Context) *DockerCompose { + dc := &DockerCompose{ + envs: dcc.envs, + services: make(map[Service]*tc.DockerContainer), + t: dcc.t, + client: resty.New().SetBaseURL("http://localhost:" + dcc.envs["SHELLHUB_HTTP_PORT"]), + down: nil, + } + + tcDc, err := compose.NewDockerComposeWith( + compose.WithStackFiles("../docker-compose.yml", "../docker-compose.test.yml"), + compose.WithLogger(log.New(io.Discard, "", log.LstdFlags)), + ) + if !assert.NoError(dcc.t, err) { + assert.FailNow(dcc.t, err.Error()) + } + + // Since we can't utilize [compose.dockerCompose] in the parameters, + // we must implement the [DockerCompose.down] method here. + dc.down = func() { + err := tcDc.Down( + ctx, + compose.RemoveOrphans(true), + compose.RemoveVolumes(true), + compose.RemoveImagesAll, + ) + if !assert.NoError(dc.t, err) { + assert.FailNow(dc.t, err.Error()) + } + + for k := range dc.services { + dc.services[k] = nil + } + } + + services := []Service{ServiceGateway, ServiceAPI, ServiceSSH, ServiceUI} + // TODO: Perhaps we could devise a strategy to wait for specific services instead + // of blocking until all are running|healthy? 
+ if !assert.NoError(dc.t, tcDc.WithEnv(dcc.envs).Up(ctx, compose.Wait(true))) { + assert.FailNow(dc.t, err.Error()) + } + + for _, service := range services { + composeService, err := tcDc.ServiceContainer(ctx, string(service)) + if !assert.NoError(dc.t, err) { + assert.FailNow(dc.t, err.Error()) + } + + dc.services[service] = composeService + } + + return dc +} diff --git a/tests/environment/docker_compose.go b/tests/environment/docker_compose.go new file mode 100644 index 00000000000..f22f3825678 --- /dev/null +++ b/tests/environment/docker_compose.go @@ -0,0 +1,154 @@ +package environment + +import ( + "context" + "io" + "log" + "testing" + + "github.com/go-resty/resty/v2" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/stretchr/testify/assert" + tc "github.com/testcontainers/testcontainers-go" +) + +type DockerCompose struct { + // t is the [testing.T] associated with the [DockerCompose] instance. It is used for + // making assertions. + t *testing.T + + // services is a list of running services such as API and CLI. + services map[Service]*tc.DockerContainer + + // client is a HTTP client with "http://localhost:{SHELLHUB_HTTP_PORT}" as the base URL. + client *resty.Client + + // envs is a map containing all environment variables passed to the services. + envs map[string]string + + // down is a function designed to be invoked internally within [Down] method calls. This + // attribute is necessary because when initializing docker-compose with testcontainer, + // the returned value is of a private type, rendering it inaccessible for passing as a + // function parameter, for example. Consequently, we construct the Down method within + // the Up method, encapsulating it within an attribute and invoking it within a method. + down func() +} + +// Down stops the [DockerCompose] instance, removing images, services, networks, and volumes +// associated with it. It's generally a good idea to encapsulate it inside a [t.Cleanup] +// function. 
+func (dc *DockerCompose) Down() { + dc.down() +} + +// R return a [resty.R] with `http://localhost:{SHELLHUB_HTTP_PORT}` as base URL. +func (dc *DockerCompose) R(ctx context.Context) *resty.Request { + return dc.client.R().SetContext(ctx) +} + +func (dc *DockerCompose) JWT(jwt string) { + dc.client.SetAuthScheme("Bearer") + dc.client.SetAuthToken(jwt) +} + +// Env retrieves a environment variable with the specified key. +func (dc *DockerCompose) Env(key string) string { + return dc.envs[key] +} + +// Service retrieves the specified service. +func (dc *DockerCompose) Service(service Service) *tc.DockerContainer { + return dc.services[service] +} + +func (dc *DockerCompose) buildCLICommand(ctx context.Context, cmds []string) (tc.Container, error) { + container, err := tc.GenericContainer(ctx, tc.GenericContainerRequest{ + ContainerRequest: tc.ContainerRequest{ + Cmd: cmds, + Networks: []string{dc.envs["SHELLHUB_NETWORK"]}, + FromDockerfile: tc.FromDockerfile{ + Repo: "cli", + Tag: "test", + Context: "..", + Dockerfile: "cli/Dockerfile.test", + PrintBuildLog: false, + KeepImage: false, + }, + }, + Logger: log.New(io.Discard, "", log.LstdFlags), + }) + if err != nil { + return nil, err + } + + return container, nil +} + +// NewUser creates a new user with the specified values. It is an abstraction around the "user create" method +// of the CLI. +// +// It is not intended to be a test of the method, but it makes some assertions to guarantee that the following +// instructions will not fail, calling assert.FailNow if any do. 
+func (dc *DockerCompose) NewUser(t *testing.T, username, email, password string) { + container, err := dc.buildCLICommand( + t.Context(), + []string{"./cli", "user", "create", username, password, email}, + ) + if !assert.NoError(dc.t, err) { + assert.FailNow(dc.t, err.Error()) + } + + container.Start(t.Context()) + + t.Cleanup(func() { + container.Terminate(context.Background()) + }) +} + +// NewNamespace creates a new namespace with the specified values. It is an abstraction around the "namespace +// create" method of the CLI. +// +// It is not intended to be a test of the method, but it makes some assertions to guarantee that the following +// instructions will not fail, calling assert.FailNow if any do. +func (dc *DockerCompose) NewNamespace(t *testing.T, owner, name, tenant string) { + container, err := dc.buildCLICommand( + t.Context(), + []string{"./cli", "namespace", "create", name, owner, tenant}, + ) + if !assert.NoError(dc.t, err) { + assert.FailNow(dc.t, err.Error()) + } + + container.Start(t.Context()) + + t.Cleanup(func() { + container.Terminate(context.Background()) + }) +} + +// AuthUser logs in with the provided username and password. It is an abstraction around the "/api/login" +// endpoint. +// +// It is not intended to be a test of the endpoint, but it makes some assertions to guarantee that the following +// instructions will not fail, calling assert.FailNow if any do. +func (dc *DockerCompose) AuthUser(ctx context.Context, username, password string) *models.UserAuthResponse { + auth := new(models.UserAuthResponse) + + res, err := dc.R(ctx). + SetBody(map[string]string{ + "username": username, + "password": password, + }). + SetResult(auth). 
+ Post("/api/login") + + if !assert.NoError(dc.t, err) { + assert.FailNow(dc.t, err.Error()) + } + + if !assert.Equal(dc.t, 200, res.StatusCode()) { + assert.FailNow(dc.t, "login fails") + } + + return auth +} diff --git a/tests/environment/docs.go b/tests/environment/docs.go new file mode 100644 index 00000000000..2621a1bbf5e --- /dev/null +++ b/tests/environment/docs.go @@ -0,0 +1,58 @@ +// Package environment provides an easy way to create, manage, and destroy ShellHub instances +// with Docker Compose. Internally, it uses [github.com/testcontainers/testcontainers-go] +// to manage services. +// +// To get started, call [New], which creates a new [DockerComposeConfigurator]. A configurator +// is a helper struct to manage "docker-compose". By default, a new configurator reads from +// `.env` to set up all environment variables. The following example creates a new configurator +// with a variable "SHELLHUB_ENVIRONMENT" set to "development": +// +// func TestSomething(t *testing.T) { +// cfg := environment.New(t).WithEnv("SHELLHUB_ENVIRONMENT", "development") +// } +// +// To avoid boilerplate between test cases, a clone of a configurator can be made; a clone +// has the same data as the original configurator but is an isolated pointer. +// +// func TestSomething(t *testing.T) { +// cfg := environment.New(t).WithEnv("SHELLHUB_ENVIRONMENT", "development") +// cloneA := cfg.Clone(t) +// cloneB := cloneA.Clone(t) +// // Both `cloneA` and `cloneB` have a "SHELLHUB_ENVIRONMENT" env +// } +// +// Every configurator is associated with a [testing.T], which is used to make standard +// assertions. +// +// To start the instance, you can call [DockerComposeConfigurator.Up], which returns a +// [DockerCompose]. A Docker Compose is a code representation of the running instance; +// it also has a [DockerCompose.Down] method, which stops and cleans up all allocated +// resources for the instance. 
Generally, it is passed to [testing.T.Cleanup]:
+//
+//	func TestSomething(t *testing.T) {
+//		cfg := environment.New(t).WithEnv("SHELLHUB_ENVIRONMENT", "development")
+//
+//		dockerCompose := cfg.Up()
+//		t.Cleanup(dockerCompose.Down)
+//	}
+//
+// The running instance provides helper methods to facilitate docker-compose manipulation
+// and communication. It also provides helper methods for generic pipelines (e.g., creating a user).
+//
+//	func TestSomething(t *testing.T) {
+//		ctx := context.Background()
+//		cfg := environment.New(t).WithEnv("SHELLHUB_ENVIRONMENT", "development")
+//
+//		dockerCompose := cfg.Up()
+//		t.Cleanup(dockerCompose.Down)
+//
+//		dockerCompose.NewUser(t, "john_doe", "john.doe@test.com", "secret") // Create a new user
+//		dockerCompose.NewNamespace(t, "john_doe", "dev", "00000000-0000-0000-0000-000000000000") // And a namespace
+//		credentials := dockerCompose.AuthUser(ctx, "john_doe", "secret")
+//		// Do something ...
+//	}
+//
+// You can also use [DockerCompose.Service] and [DockerCompose.Env] to retrieve running
+// docker-compose services and environment variable values. [DockerCompose.R] can be used to
+// make internal HTTP requests. Refer to the [docker_compose] file for more methods.
+package environment
diff --git a/tests/environment/utils.go b/tests/environment/utils.go
new file mode 100644
index 00000000000..99682c035d1
--- /dev/null
+++ b/tests/environment/utils.go
@@ -0,0 +1,58 @@
+package environment
+
+import (
+	"bytes"
+	"io"
+	"net"
+	"strconv"
+	"testing"
+
+	"github.com/docker/docker/pkg/stdcopy"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+	"golang.org/x/exp/slices"
+)
+
+type Service string
+
+const (
+	ServiceGateway Service = "gateway"
+	ServiceAgent   Service = "agent"
+	ServiceAPI     Service = "api"
+	ServiceSSH     Service = "ssh"
+	ServiceUI      Service = "ui"
+)
+
+var freePortController []string
+
+// GetFreePort returns a randomly available TCP port. 
It can be used to avoid
+// network conflicts in Docker Compose.
+func GetFreePort(t *testing.T) string {
+	addr, err := net.ResolveTCPAddr("tcp", "localhost:0")
+	require.NoError(t, err)
+
+	l, err := net.ListenTCP("tcp", addr)
+	require.NoError(t, err)
+
+	defer l.Close()
+
+	port := strconv.Itoa(l.Addr().(*net.TCPAddr).Port)
+	if slices.Contains(freePortController, port) {
+		return GetFreePort(t)
+	}
+
+	freePortController = append(freePortController, port)
+
+	return port
+}
+
+func ReaderToString(t *testing.T, reader io.Reader) string {
+	buffer := bytes.NewBuffer(make([]byte, 0, 1024)) // zero length: the slice is the buffer's initial *contents*; a 1024-length slice would prepend 1024 NUL bytes to the result
+
+	_, err := stdcopy.StdCopy(buffer, io.Discard, reader)
+	if !assert.NoError(t, err) {
+		assert.FailNow(t, err.Error())
+	}
+
+	return buffer.String()
+}
diff --git a/tests/go.mod b/tests/go.mod
new file mode 100644
index 00000000000..443fecf8fe7
--- /dev/null
+++ b/tests/go.mod
@@ -0,0 +1,224 @@
+module github.com/shellhub-io/shellhub/tests
+
+go 1.24.9
+
+require (
+	github.com/bramvdbogaerde/go-scp v1.5.0
+	github.com/docker/docker v28.5.1+incompatible
+	github.com/go-resty/resty/v2 v2.16.5
+	github.com/joho/godotenv v1.5.1
+	github.com/pkg/sftp v1.13.9
+	github.com/shellhub-io/shellhub v0.20.1
+	github.com/sirupsen/logrus v1.9.3
+	github.com/stretchr/testify v1.11.1
+	github.com/testcontainers/testcontainers-go v0.39.0
+	github.com/testcontainers/testcontainers-go/modules/compose v0.39.1
+	golang.org/x/crypto v0.45.0
+	golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0
+)
+
+require (
+	dario.cat/mergo v1.0.2 // indirect
+	github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c // indirect
+	github.com/DefangLabs/secret-detector v0.0.0-20250811234530-d4b4214cd679 // indirect
+	github.com/Masterminds/semver/v3 v3.4.0 // indirect
+	github.com/Microsoft/go-winio v0.6.2 // indirect
+	github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d // indirect
+	github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
+	github.com/aws/aws-sdk-go-v2 v1.39.4 // 
indirect + github.com/aws/aws-sdk-go-v2/config v1.31.15 // indirect + github.com/aws/aws-sdk-go-v2/credentials v1.18.19 // indirect + github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.11 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.11 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.11 // indirect + github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.2 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.11 // indirect + github.com/aws/aws-sdk-go-v2/service/sso v1.29.8 // indirect + github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.3 // indirect + github.com/aws/aws-sdk-go-v2/service/sts v1.38.9 // indirect + github.com/aws/smithy-go v1.23.1 // indirect + github.com/beorn7/perks v1.0.1 // indirect + github.com/buger/goterm v1.0.4 // indirect + github.com/cenkalti/backoff/v4 v4.3.0 // indirect + github.com/cenkalti/backoff/v5 v5.0.3 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/clipperhouse/stringish v0.1.1 // indirect + github.com/clipperhouse/uax29/v2 v2.3.0 // indirect + github.com/compose-spec/compose-go/v2 v2.9.0 // indirect + github.com/containerd/console v1.0.5 // indirect + github.com/containerd/containerd/api v1.9.0 // indirect + github.com/containerd/containerd/v2 v2.1.5 // indirect + github.com/containerd/continuity v0.4.5 // indirect + github.com/containerd/errdefs v1.0.0 // indirect + github.com/containerd/errdefs/pkg v0.3.0 // indirect + github.com/containerd/log v0.1.0 // indirect + github.com/containerd/platforms v1.0.0-rc.1 // indirect + github.com/containerd/ttrpc v1.2.7 // indirect + github.com/containerd/typeurl/v2 v2.2.3 // indirect + github.com/cpuguy83/dockercfg v0.3.2 // indirect + github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect + github.com/distribution/reference v0.6.0 // indirect + github.com/docker/buildx v0.29.1 // indirect + 
github.com/docker/cli v28.5.1+incompatible // indirect + github.com/docker/cli-docs-tool v0.10.0 // indirect + github.com/docker/compose/v2 v2.40.2 // indirect + github.com/docker/distribution v2.8.3+incompatible // indirect + github.com/docker/docker-credential-helpers v0.9.4 // indirect + github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c // indirect + github.com/docker/go-connections v0.6.0 // indirect + github.com/docker/go-metrics v0.0.1 // indirect + github.com/docker/go-units v0.5.0 // indirect + github.com/ebitengine/purego v0.9.0 // indirect + github.com/eiannone/keyboard v0.0.0-20220611211555-0d226195f203 // indirect + github.com/emicklei/go-restful/v3 v3.13.0 // indirect + github.com/felixge/httpsnoop v1.0.4 // indirect + github.com/fsnotify/fsevents v0.2.0 // indirect + github.com/fvbommel/sortorder v1.1.0 // indirect + github.com/fxamacker/cbor/v2 v2.9.0 // indirect + github.com/go-logr/logr v1.4.3 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-ole/go-ole v1.3.0 // indirect + github.com/go-openapi/jsonpointer v0.22.1 // indirect + github.com/go-openapi/jsonreference v0.21.2 // indirect + github.com/go-openapi/swag v0.25.1 // indirect + github.com/go-openapi/swag/cmdutils v0.25.1 // indirect + github.com/go-openapi/swag/conv v0.25.1 // indirect + github.com/go-openapi/swag/fileutils v0.25.1 // indirect + github.com/go-openapi/swag/jsonname v0.25.1 // indirect + github.com/go-openapi/swag/jsonutils v0.25.1 // indirect + github.com/go-openapi/swag/loading v0.25.1 // indirect + github.com/go-openapi/swag/mangling v0.25.1 // indirect + github.com/go-openapi/swag/netutils v0.25.1 // indirect + github.com/go-openapi/swag/stringutils v0.25.1 // indirect + github.com/go-openapi/swag/typeutils v0.25.1 // indirect + github.com/go-openapi/swag/yamlutils v0.25.1 // indirect + github.com/go-playground/locales v0.14.1 // indirect + github.com/go-playground/universal-translator v0.18.1 // indirect + 
github.com/go-playground/validator/v10 v10.11.2 // indirect + github.com/go-viper/mapstructure/v2 v2.4.0 // indirect + github.com/gofrs/flock v0.13.0 // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/golang-jwt/jwt/v5 v5.3.0 // indirect + github.com/golang/protobuf v1.5.4 // indirect + github.com/google/gnostic-models v0.7.0 // indirect + github.com/google/go-cmp v0.7.0 // indirect + github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/gorilla/mux v1.8.1 // indirect + github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674 // indirect + github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.3 // indirect + github.com/hashicorp/errwrap v1.1.0 // indirect + github.com/hashicorp/go-cleanhttp v0.5.2 // indirect + github.com/hashicorp/go-multierror v1.1.1 // indirect + github.com/hashicorp/go-version v1.7.0 // indirect + github.com/in-toto/in-toto-golang v0.9.0 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect + github.com/inhies/go-bytesize v0.0.0-20220417184213-4913239db9cf // indirect + github.com/jonboulle/clockwork v0.5.0 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/klauspost/compress v1.18.1 // indirect + github.com/kr/fs v0.1.0 // indirect + github.com/leodido/go-urn v1.2.2 // indirect + github.com/lufia/plan9stats v0.0.0-20251013123823-9fd1530e3ec3 // indirect + github.com/magiconair/properties v1.8.10 // indirect + github.com/mattn/go-runewidth v0.0.19 // indirect + github.com/mattn/go-shellwords v1.0.12 // indirect + github.com/miekg/pkcs11 v1.1.1 // indirect + github.com/mitchellh/hashstructure/v2 v2.0.2 // indirect + github.com/moby/buildkit v0.25.1 // indirect + github.com/moby/docker-image-spec v1.3.1 // indirect + github.com/moby/go-archive v0.1.0 // indirect + github.com/moby/locker v1.0.1 // indirect + github.com/moby/patternmatcher v0.6.0 // indirect + github.com/moby/spdystream v0.5.0 // indirect + 
github.com/moby/sys/atomicwriter v0.1.0 // indirect + github.com/moby/sys/capability v0.4.0 // indirect + github.com/moby/sys/mountinfo v0.7.2 // indirect + github.com/moby/sys/sequential v0.6.0 // indirect + github.com/moby/sys/signal v0.7.1 // indirect + github.com/moby/sys/symlink v0.3.0 // indirect + github.com/moby/sys/user v0.4.0 // indirect + github.com/moby/sys/userns v0.1.0 // indirect + github.com/moby/term v0.5.2 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.3-0.20250322232337-35a7c28c31ee // indirect + github.com/morikuni/aec v1.0.0 // indirect + github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect + github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f // indirect + github.com/opencontainers/go-digest v1.0.0 // indirect + github.com/opencontainers/image-spec v1.1.1 // indirect + github.com/pelletier/go-toml v1.9.5 // indirect + github.com/pkg/errors v0.9.1 // indirect + github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect + github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect + github.com/prometheus/client_golang v1.23.2 // indirect + github.com/prometheus/client_model v0.6.2 // indirect + github.com/prometheus/common v0.67.2 // indirect + github.com/prometheus/procfs v0.19.1 // indirect + github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 // indirect + github.com/secure-systems-lab/go-securesystemslib v0.9.1 // indirect + github.com/serialx/hashring v0.0.0-20200727003509-22c0c7ab6b1b // indirect + github.com/shibumi/go-pathspec v1.3.0 // indirect + github.com/shirou/gopsutil/v4 v4.25.9 // indirect + github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 // indirect + github.com/spf13/cobra v1.10.1 // indirect + github.com/spf13/pflag v1.0.10 // indirect + github.com/theupdateframework/notary 
v0.7.0 // indirect + github.com/tilt-dev/fsnotify v1.4.8-0.20220602155310-fff9c274a375 // indirect + github.com/tklauser/go-sysconf v0.3.15 // indirect + github.com/tklauser/numcpus v0.10.0 // indirect + github.com/tonistiigi/dchapes-mode v0.0.0-20250318174251-73d941a28323 // indirect + github.com/tonistiigi/fsutil v0.0.0-20250605211040-586307ad452f // indirect + github.com/tonistiigi/go-csvvalue v0.0.0-20240814133006-030d3b2625d0 // indirect + github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea // indirect + github.com/tonistiigi/vt100 v0.0.0-20240514184818-90bafcd6abab // indirect + github.com/x448/float16 v0.8.4 // indirect + github.com/xhit/go-str2duration/v2 v2.1.0 // indirect + github.com/yusufpapurcu/wmi v1.2.4 // indirect + github.com/zclconf/go-cty v1.17.0 // indirect + go.opentelemetry.io/auto/sdk v1.2.1 // indirect + go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.63.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.63.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0 // indirect + go.opentelemetry.io/otel v1.38.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.38.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.38.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.38.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.38.0 // indirect + go.opentelemetry.io/otel/metric v1.38.0 // indirect + go.opentelemetry.io/otel/sdk v1.38.0 // indirect + go.opentelemetry.io/otel/sdk/metric v1.38.0 // indirect + go.opentelemetry.io/otel/trace v1.38.0 // indirect + go.opentelemetry.io/proto/otlp v1.8.0 // indirect + go.yaml.in/yaml/v2 v2.4.3 // indirect + go.yaml.in/yaml/v3 v3.0.4 // indirect + golang.org/x/net v0.47.0 // indirect + 
golang.org/x/oauth2 v0.32.0 // indirect + golang.org/x/sync v0.18.0 // indirect + golang.org/x/sys v0.38.0 // indirect + golang.org/x/term v0.37.0 // indirect + golang.org/x/text v0.31.0 // indirect + golang.org/x/time v0.14.0 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20251022142026-3a174f9686a8 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20251022142026-3a174f9686a8 // indirect + google.golang.org/grpc v1.76.0 // indirect + google.golang.org/protobuf v1.36.10 // indirect + gopkg.in/evanphx/json-patch.v4 v4.13.0 // indirect + gopkg.in/inf.v0 v0.9.1 // indirect + gopkg.in/ini.v1 v1.67.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect + k8s.io/api v0.34.1 // indirect + k8s.io/apimachinery v0.34.1 // indirect + k8s.io/client-go v0.34.1 // indirect + k8s.io/klog/v2 v2.130.1 // indirect + k8s.io/kube-openapi v0.0.0-20250910181357-589584f1c912 // indirect + k8s.io/utils v0.0.0-20251002143259-bc988d571ff4 // indirect + sigs.k8s.io/json v0.0.0-20250730193827-2d320260d730 // indirect + sigs.k8s.io/randfill v1.0.0 // indirect + sigs.k8s.io/structured-merge-diff/v6 v6.3.0 // indirect + sigs.k8s.io/yaml v1.6.0 // indirect + tags.cncf.io/container-device-interface v1.0.1 // indirect +) diff --git a/tests/go.sum b/tests/go.sum new file mode 100644 index 00000000000..8faf17948ad --- /dev/null +++ b/tests/go.sum @@ -0,0 +1,770 @@ +dario.cat/mergo v1.0.2 h1:85+piFYR1tMbRrLcDwR18y4UKJ3aH1Tbzi24VRW1TK8= +dario.cat/mergo v1.0.2/go.mod h1:E/hbnu0NxMFBjpMIE34DRGLWqDy0g5FuKDhCb31ngxA= +github.com/AdaLogics/go-fuzz-headers v0.0.0-20240806141605-e8a1dd7889d6 h1:He8afgbRMd7mFxO99hRNu+6tazq8nFF9lIwo9JFroBk= +github.com/AdaLogics/go-fuzz-headers v0.0.0-20240806141605-e8a1dd7889d6/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8= +github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c h1:udKWzYgxTojEKWjV8V+WSxDXJ4NFATAsZjh8iIbsQIg= +github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c/go.mod 
h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= +github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/DefangLabs/secret-detector v0.0.0-20250811234530-d4b4214cd679 h1:qNT7R4qrN+5u5ajSbqSW1opHP4LA8lzA+ASyw5MQZjs= +github.com/DefangLabs/secret-detector v0.0.0-20250811234530-d4b4214cd679/go.mod h1:blbwPQh4DTlCZEfk1BLU4oMIhLda2U+A840Uag9DsZw= +github.com/Masterminds/semver/v3 v3.4.0 h1:Zog+i5UMtVoCU8oKka5P7i9q9HgrJeGzI9SA1Xbatp0= +github.com/Masterminds/semver/v3 v3.4.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= +github.com/Microsoft/hcsshim v0.13.0 h1:/BcXOiS6Qi7N9XqUcv27vkIuVOkBEcWstd2pMlWSeaA= +github.com/Microsoft/hcsshim v0.13.0/go.mod h1:9KWJ/8DgU+QzYGupX4tzMhRQE8h6w90lH6HAaclpEok= +github.com/Shopify/logrus-bugsnag v0.0.0-20170309145241-6dbc35f2c30d h1:hi6J4K6DKrR4/ljxn6SF6nURyu785wKMuQcjt7H3VCQ= +github.com/Shopify/logrus-bugsnag v0.0.0-20170309145241-6dbc35f2c30d/go.mod h1:HI8ITrYtUY+O+ZhtlqUnD8+KwNPOyugEhfP9fdUIaEQ= +github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8= +github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/anchore/go-struct-converter v0.0.0-20221118182256-c68fdcfa2092 h1:aM1rlcoLz8y5B2r4tTLMiVTrMtpfY0O8EScKJxaSaEc= +github.com/anchore/go-struct-converter v0.0.0-20221118182256-c68fdcfa2092/go.mod h1:rYqSE9HbjzpHTI74vwPvae4ZVYZd1lue2ta6xHPdblA= 
+github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY= +github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4= +github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= +github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= +github.com/aws/aws-sdk-go-v2 v1.39.4 h1:qTsQKcdQPHnfGYBBs+Btl8QwxJeoWcOcPcixK90mRhg= +github.com/aws/aws-sdk-go-v2 v1.39.4/go.mod h1:yWSxrnioGUZ4WVv9TgMrNUeLV3PFESn/v+6T/Su8gnM= +github.com/aws/aws-sdk-go-v2/config v1.31.15 h1:gE3M4xuNXfC/9bG4hyowGm/35uQTi7bUKeYs5e/6uvU= +github.com/aws/aws-sdk-go-v2/config v1.31.15/go.mod h1:HvnvGJoE2I95KAIW8kkWVPJ4XhdrlvwJpV6pEzFQa8o= +github.com/aws/aws-sdk-go-v2/credentials v1.18.19 h1:Jc1zzwkSY1QbkEcLujwqRTXOdvW8ppND3jRBb/VhBQc= +github.com/aws/aws-sdk-go-v2/credentials v1.18.19/go.mod h1:DIfQ9fAk5H0pGtnqfqkbSIzky82qYnGvh06ASQXXg6A= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.11 h1:X7X4YKb+c0rkI6d4uJ5tEMxXgCZ+jZ/D6mvkno8c8Uw= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.11/go.mod h1:EqM6vPZQsZHYvC4Cai35UDg/f5NCEU+vp0WfbVqVcZc= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.11 h1:7AANQZkF3ihM8fbdftpjhken0TP9sBzFbV/Ze/Y4HXA= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.11/go.mod h1:NTF4QCGkm6fzVwncpkFQqoquQyOolcyXfbpC98urj+c= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.11 h1:ShdtWUZT37LCAA4Mw2kJAJtzaszfSHFb5n25sdcv4YE= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.11/go.mod h1:7bUb2sSr2MZ3M/N+VyETLTQtInemHXb/Fl3s8CLzm0Y= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 h1:WKuaxf++XKWlHWu9ECbMlha8WOEGm0OUEZqm4K/Gcfk= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4/go.mod h1:ZWy7j6v1vWGmPReu0iSGvRiise4YI5SkR3OHKTZ6Wuc= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.2 h1:xtuxji5CS0JknaXoACOunXOYOQzgfTvGAc9s2QdCJA4= 
+github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.2/go.mod h1:zxwi0DIR0rcRcgdbl7E2MSOvxDyyXGBlScvBkARFaLQ= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.11 h1:GpMf3z2KJa4RnJ0ew3Hac+hRFYLZ9DDjfgXjuW+pB54= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.11/go.mod h1:6MZP3ZI4QQsgUCFTwMZA2V0sEriNQ8k2hmoHF3qjimQ= +github.com/aws/aws-sdk-go-v2/service/sso v1.29.8 h1:M5nimZmugcZUO9wG7iVtROxPhiqyZX6ejS1lxlDPbTU= +github.com/aws/aws-sdk-go-v2/service/sso v1.29.8/go.mod h1:mbef/pgKhtKRwrigPPs7SSSKZgytzP8PQ6P6JAAdqyM= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.3 h1:S5GuJZpYxE0lKeMHKn+BRTz6PTFpgThyJ+5mYfux7BM= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.3/go.mod h1:X4OF+BTd7HIb3L+tc4UlWHVrpgwZZIVENU15pRDVTI0= +github.com/aws/aws-sdk-go-v2/service/sts v1.38.9 h1:Ekml5vGg6sHSZLZJQJagefnVe6PmqC2oiRkBq4F7fU0= +github.com/aws/aws-sdk-go-v2/service/sts v1.38.9/go.mod h1:/e15V+o1zFHWdH3u7lpI3rVBcxszktIKuHKCY2/py+k= +github.com/aws/smithy-go v1.23.1 h1:sLvcH6dfAFwGkHLZ7dGiYF7aK6mg4CgKA/iDKjLDt9M= +github.com/aws/smithy-go v1.23.1/go.mod h1:LEj2LM3rBRQJxPZTB4KuzZkaZYnZPnvgIhb4pu07mx0= +github.com/beorn7/perks v0.0.0-20150223135152-b965b613227f/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/bitly/go-hostpool v0.1.0/go.mod h1:4gOCgp6+NZnVqlKyZ/iBZFTAJKembaVENUpMkpg42fw= +github.com/bitly/go-simplejson v0.5.0/go.mod h1:cXHtHw4XUPsvGaxgjIAn8PhEWG9NfngEKAMDJEczWVA= +github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4= +github.com/bramvdbogaerde/go-scp v1.5.0 
h1:a9BinAjTfQh273eh7vd3qUgmBC+bx+3TRDtkZWmIpzM= +github.com/bramvdbogaerde/go-scp v1.5.0/go.mod h1:on2aH5AxaFb2G0N5Vsdy6B0Ml7k9HuHSwfo1y0QzAbQ= +github.com/buger/goterm v1.0.4 h1:Z9YvGmOih81P0FbVtEYTFF6YsSgxSUKEhf/f9bTMXbY= +github.com/buger/goterm v1.0.4/go.mod h1:HiFWV3xnkolgrBV3mY8m0X0Pumt4zg4QhbdOzQtB8tE= +github.com/bugsnag/bugsnag-go v1.0.5-0.20150529004307-13fd6b8acda0 h1:s7+5BfS4WFJoVF9pnB8kBk03S7pZXRdKamnV0FOl5Sc= +github.com/bugsnag/bugsnag-go v1.0.5-0.20150529004307-13fd6b8acda0/go.mod h1:2oa8nejYd4cQ/b0hMIopN0lCRxU0bueqREvZLWFrtK8= +github.com/bugsnag/osext v0.0.0-20130617224835-0dd3f918b21b h1:otBG+dV+YK+Soembjv71DPz3uX/V/6MMlSyD9JBQ6kQ= +github.com/bugsnag/osext v0.0.0-20130617224835-0dd3f918b21b/go.mod h1:obH5gd0BsqsP2LwDJ9aOkm/6J86V6lyAXCoQWGw3K50= +github.com/bugsnag/panicwrap v0.0.0-20151223152923-e2c28503fcd0 h1:nvj0OLI3YqYXer/kZD8Ri1aaunCxIEsOst1BVJswV0o= +github.com/bugsnag/panicwrap v0.0.0-20151223152923-e2c28503fcd0/go.mod h1:D/8v3kj0zr8ZAKg1AQ6crr+5VwKN5eIywRkfhyM/+dE= +github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= +github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= +github.com/cenkalti/backoff/v5 v5.0.3 h1:ZN+IMa753KfX5hd8vVaMixjnqRZ3y8CuJKRKj1xcsSM= +github.com/cenkalti/backoff/v5 v5.0.3/go.mod h1:rkhZdG3JZukswDf7f0cwqPNk4K0sa+F97BxZthm/crw= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/clipperhouse/stringish v0.1.1 h1:+NSqMOr3GR6k1FdRhhnXrLfztGzuG+VuFDfatpWHKCs= +github.com/clipperhouse/stringish v0.1.1/go.mod h1:v/WhFtE1q0ovMta2+m+UbpZ+2/HEXNWYXQgCt4hdOzA= +github.com/clipperhouse/uax29/v2 v2.3.0 h1:SNdx9DVUqMoBuBoW3iLOj4FQv3dN5mDtuqwuhIGpJy4= +github.com/clipperhouse/uax29/v2 v2.3.0/go.mod h1:Wn1g7MK6OoeDT0vL+Q0SQLDz/KpfsVRgg6W7ihQeh4g= +github.com/cloudflare/cfssl v0.0.0-20180223231731-4e2dcbde5004 
h1:lkAMpLVBDaj17e85keuznYcH5rqI438v41pKcBl4ZxQ= +github.com/cloudflare/cfssl v0.0.0-20180223231731-4e2dcbde5004/go.mod h1:yMWuSON2oQp+43nFtAV/uvKQIFpSPerB57DCt9t8sSA= +github.com/codahale/rfc6979 v0.0.0-20141003034818-6a90f24967eb h1:EDmT6Q9Zs+SbUoc7Ik9EfrFqcylYqgPZ9ANSbTAntnE= +github.com/codahale/rfc6979 v0.0.0-20141003034818-6a90f24967eb/go.mod h1:ZjrT6AXHbDs86ZSdt/osfBi5qfexBrKUdONk989Wnk4= +github.com/compose-spec/compose-go/v2 v2.9.0 h1:UHSv/QHlo6QJtrT4igF1rdORgIUhDo1gWuyJUoiNNIM= +github.com/compose-spec/compose-go/v2 v2.9.0/go.mod h1:Oky9AZGTRB4E+0VbTPZTUu4Kp+oEMMuwZXZtPPVT1iE= +github.com/containerd/cgroups/v3 v3.0.5 h1:44na7Ud+VwyE7LIoJ8JTNQOa549a8543BmzaJHo6Bzo= +github.com/containerd/cgroups/v3 v3.0.5/go.mod h1:SA5DLYnXO8pTGYiAHXz94qvLQTKfVM5GEVisn4jpins= +github.com/containerd/console v1.0.5 h1:R0ymNeydRqH2DmakFNdmjR2k0t7UPuiOV/N/27/qqsc= +github.com/containerd/console v1.0.5/go.mod h1:YynlIjWYF8myEu6sdkwKIvGQq+cOckRm6So2avqoYAk= +github.com/containerd/containerd/api v1.9.0 h1:HZ/licowTRazus+wt9fM6r/9BQO7S0vD5lMcWspGIg0= +github.com/containerd/containerd/api v1.9.0/go.mod h1:GhghKFmTR3hNtyznBoQ0EMWr9ju5AqHjcZPsSpTKutI= +github.com/containerd/containerd/v2 v2.1.5 h1:pWSmPxUszaLZKQPvOx27iD4iH+aM6o0BoN9+hg77cro= +github.com/containerd/containerd/v2 v2.1.5/go.mod h1:8C5QV9djwsYDNhxfTCFjWtTBZrqjditQ4/ghHSYjnHM= +github.com/containerd/continuity v0.4.5 h1:ZRoN1sXq9u7V6QoHMcVWGhOwDFqZ4B9i5H6un1Wh0x4= +github.com/containerd/continuity v0.4.5/go.mod h1:/lNJvtJKUQStBzpVQ1+rasXO1LAWtUQssk28EZvJ3nE= +github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI= +github.com/containerd/errdefs v1.0.0/go.mod h1:+YBYIdtsnF4Iw6nWZhJcqGSg/dwvV7tyJ/kCkyJ2k+M= +github.com/containerd/errdefs/pkg v0.3.0 h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151Xdx3ZPPE= +github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk= +github.com/containerd/fifo v1.1.0 h1:4I2mbh5stb1u6ycIABlBw9zgtlK8viPI9QkQNRQEEmY= 
+github.com/containerd/fifo v1.1.0/go.mod h1:bmC4NWMbXlt2EZ0Hc7Fx7QzTFxgPID13eH0Qu+MAb2o= +github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= +github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= +github.com/containerd/nydus-snapshotter v0.15.2 h1:qsHI4M+Wwrf6Jr4eBqhNx8qh+YU0dSiJ+WPmcLFWNcg= +github.com/containerd/nydus-snapshotter v0.15.2/go.mod h1:FfwH2KBkNYoisK/e+KsmNr7xTU53DmnavQHMFOcXwfM= +github.com/containerd/platforms v1.0.0-rc.1 h1:83KIq4yy1erSRgOVHNk1HYdPvzdJ5CnsWaRoJX4C41E= +github.com/containerd/platforms v1.0.0-rc.1/go.mod h1:J71L7B+aiM5SdIEqmd9wp6THLVRzJGXfNuWCZCllLA4= +github.com/containerd/plugin v1.0.0 h1:c8Kf1TNl6+e2TtMHZt+39yAPDbouRH9WAToRjex483Y= +github.com/containerd/plugin v1.0.0/go.mod h1:hQfJe5nmWfImiqT1q8Si3jLv3ynMUIBB47bQ+KexvO8= +github.com/containerd/stargz-snapshotter v0.16.3 h1:zbQMm8dRuPHEOD4OqAYGajJJUwCeUzt4j7w9Iaw58u4= +github.com/containerd/stargz-snapshotter/estargz v0.16.3 h1:7evrXtoh1mSbGj/pfRccTampEyKpjpOnS3CyiV1Ebr8= +github.com/containerd/stargz-snapshotter/estargz v0.16.3/go.mod h1:uyr4BfYfOj3G9WBVE8cOlQmXAbPN9VEQpBBeJIuOipU= +github.com/containerd/ttrpc v1.2.7 h1:qIrroQvuOL9HQ1X6KHe2ohc7p+HP/0VE6XPU7elJRqQ= +github.com/containerd/ttrpc v1.2.7/go.mod h1:YCXHsb32f+Sq5/72xHubdiJRQY9inL4a4ZQrAbN1q9o= +github.com/containerd/typeurl/v2 v2.2.3 h1:yNA/94zxWdvYACdYO8zofhrTVuQY73fFU1y++dYSw40= +github.com/containerd/typeurl/v2 v2.2.3/go.mod h1:95ljDnPfD3bAbDJRugOiShd/DlAAsxGtUBhJxIn7SCk= +github.com/cpuguy83/dockercfg v0.3.2 h1:DlJTyZGBDlXqUZ2Dk2Q3xHs/FtnooJJVaad2S9GKorA= +github.com/cpuguy83/dockercfg v0.3.2/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc= +github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/creack/pty v1.1.24 h1:bJrF4RRfyJnbTJqzRLHzcGaZK1NeM5kTC9jGgovnR1s= +github.com/creack/pty v1.1.24/go.mod 
h1:08sCNb52WyoAwi2QDyzUCTgcvVFhUzewun7wtTfvcwE= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/denisenkom/go-mssqldb v0.0.0-20191128021309-1d7a30a10f73/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27NDyej4t/EjAShU= +github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= +github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= +github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI= +github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= +github.com/docker/buildx v0.29.1 h1:58hxM5Z4mnNje3G5NKfULT9xCr8ooM8XFtlfUK9bKaA= +github.com/docker/buildx v0.29.1/go.mod h1:J4EFv6oxlPiV1MjO0VyJx2u5tLM7ImDEl9zyB8d4wPI= +github.com/docker/cli v28.5.1+incompatible h1:ESutzBALAD6qyCLqbQSEf1a/U8Ybms5agw59yGVc+yY= +github.com/docker/cli v28.5.1+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= +github.com/docker/cli-docs-tool v0.10.0 h1:bOD6mKynPQgojQi3s2jgcUWGp/Ebqy1SeCr9VfKQLLU= +github.com/docker/cli-docs-tool v0.10.0/go.mod h1:5EM5zPnT2E7yCLERZmrDA234Vwn09fzRHP4aX1qwp1U= +github.com/docker/compose/v2 v2.40.2 h1:h2bDBJkOuqmj93XvT2oI0ArPQonE0lGtWiILXdiXvbA= +github.com/docker/compose/v2 v2.40.2/go.mod h1:CbSJpKGw20LInVsPjglZ8z7Squ3OBQOD7Ux5nkjGfIU= +github.com/docker/distribution v2.7.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= +github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBirtxJnzDrHLEKxTAYk= +github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= 
+github.com/docker/docker v28.5.1+incompatible h1:Bm8DchhSD2J6PsFzxC35TZo4TLGR2PdW/E69rU45NhM= +github.com/docker/docker v28.5.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker-credential-helpers v0.9.4 h1:76ItO69/AP/V4yT9V4uuuItG0B1N8hvt0T0c0NN/DzI= +github.com/docker/docker-credential-helpers v0.9.4/go.mod h1:v1S+hepowrQXITkEfw6o4+BMbGot02wiKpzWhGUZK6c= +github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c h1:lzqkGL9b3znc+ZUgi7FlLnqjQhcXxkNM/quxIjBVMD0= +github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c/go.mod h1:CADgU4DSXK5QUlFslkQu2yW2TKzFZcXq/leZfM0UH5Q= +github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= +github.com/docker/go-connections v0.6.0 h1:LlMG9azAe1TqfR7sO+NJttz1gy6KO7VJBh+pMmjSD94= +github.com/docker/go-connections v0.6.0/go.mod h1:AahvXYshr6JgfUJGdDCs2b5EZG/vmaMAntpSFH5BFKE= +github.com/docker/go-metrics v0.0.0-20180209012529-399ea8c73916/go.mod h1:/u0gXw0Gay3ceNrsHubL3BtdOL2fHf93USgMTe0W5dI= +github.com/docker/go-metrics v0.0.1 h1:AgB/0SvBxihN0X8OR4SjsblXkbMvalQ8cjmtKQ2rQV8= +github.com/docker/go-metrics v0.0.1/go.mod h1:cG1hvH2utMXtqgqqYE9plW6lDxS3/5ayHzueweSI3Vw= +github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= +github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/docker/libtrust v0.0.0-20160708172513-aabc10ec26b7 h1:UhxFibDNY/bfvqU5CAUmr9zpesgbU6SWc8/B4mflAE4= +github.com/docker/libtrust v0.0.0-20160708172513-aabc10ec26b7/go.mod h1:cyGadeNEkKy96OOhEzfZl+yxihPEzKnqJwvfuSUqbZE= +github.com/dvsekhvalnov/jose2go v0.0.0-20170216131308-f21a8cedbbae/go.mod h1:7BvyPhdbLxMXIYTFPLsyJRFMsKmOZnQmzh6Gb+uquuM= +github.com/ebitengine/purego v0.9.0 h1:mh0zpKBIXDceC63hpvPuGLiJ8ZAa3DfrFTudmfi8A4k= +github.com/ebitengine/purego v0.9.0/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ= +github.com/eiannone/keyboard v0.0.0-20220611211555-0d226195f203 
h1:XBBHcIb256gUJtLmY22n99HaZTz+r2Z51xUPi01m3wg= +github.com/eiannone/keyboard v0.0.0-20220611211555-0d226195f203/go.mod h1:E1jcSv8FaEny+OP/5k9UxZVw9YFWGj7eI4KR/iOBqCg= +github.com/emicklei/go-restful/v3 v3.13.0 h1:C4Bl2xDndpU6nJ4bc1jXd+uTmYPVUwkD6bFY/oTyCes= +github.com/emicklei/go-restful/v3 v3.13.0/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc= +github.com/erikstmartin/go-testdb v0.0.0-20160219214506-8d10e4a1bae5/go.mod h1:a2zkGnVExMxdzMo3M0Hi/3sEU+cWnZpSni0O6/Yb/P0= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/fsnotify/fsevents v0.2.0 h1:BRlvlqjvNTfogHfeBOFvSC9N0Ddy+wzQCQukyoD7o/c= +github.com/fsnotify/fsevents v0.2.0/go.mod h1:B3eEk39i4hz8y1zaWS/wPrAP4O6wkIl7HQwKBr1qH/w= +github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/fvbommel/sortorder v1.1.0 h1:fUmoe+HLsBTctBDoaBwpQo5N+nrCp8g/BjKb/6ZQmYw= +github.com/fvbommel/sortorder v1.1.0/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0= +github.com/fxamacker/cbor/v2 v2.9.0 h1:NpKPmjDBgUfBms6tr6JZkTHtfFGcMKsw3eGcmD/sapM= +github.com/fxamacker/cbor/v2 v2.9.0/go.mod h1:vM4b+DJCtHn+zz7h3FFp/hDAI9WNWCsZj23V5ytsSxQ= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-ole/go-ole 
v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= +github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE= +github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78= +github.com/go-openapi/jsonpointer v0.22.1 h1:sHYI1He3b9NqJ4wXLoJDKmUmHkWy/L7rtEo92JUxBNk= +github.com/go-openapi/jsonpointer v0.22.1/go.mod h1:pQT9OsLkfz1yWoMgYFy4x3U5GY5nUlsOn1qSBH5MkCM= +github.com/go-openapi/jsonreference v0.21.2 h1:Wxjda4M/BBQllegefXrY/9aq1fxBA8sI5M/lFU6tSWU= +github.com/go-openapi/jsonreference v0.21.2/go.mod h1:pp3PEjIsJ9CZDGCNOyXIQxsNuroxm8FAJ/+quA0yKzQ= +github.com/go-openapi/swag v0.25.1 h1:6uwVsx+/OuvFVPqfQmOOPsqTcm5/GkBhNwLqIR916n8= +github.com/go-openapi/swag v0.25.1/go.mod h1:bzONdGlT0fkStgGPd3bhZf1MnuPkf2YAys6h+jZipOo= +github.com/go-openapi/swag/cmdutils v0.25.1 h1:nDke3nAFDArAa631aitksFGj2omusks88GF1VwdYqPY= +github.com/go-openapi/swag/cmdutils v0.25.1/go.mod h1:pdae/AFo6WxLl5L0rq87eRzVPm/XRHM3MoYgRMvG4A0= +github.com/go-openapi/swag/conv v0.25.1 h1:+9o8YUg6QuqqBM5X6rYL/p1dpWeZRhoIt9x7CCP+he0= +github.com/go-openapi/swag/conv v0.25.1/go.mod h1:Z1mFEGPfyIKPu0806khI3zF+/EUXde+fdeksUl2NiDs= +github.com/go-openapi/swag/fileutils v0.25.1 h1:rSRXapjQequt7kqalKXdcpIegIShhTPXx7yw0kek2uU= +github.com/go-openapi/swag/fileutils v0.25.1/go.mod h1:+NXtt5xNZZqmpIpjqcujqojGFek9/w55b3ecmOdtg8M= +github.com/go-openapi/swag/jsonname v0.25.1 h1:Sgx+qbwa4ej6AomWC6pEfXrA6uP2RkaNjA9BR8a1RJU= +github.com/go-openapi/swag/jsonname v0.25.1/go.mod h1:71Tekow6UOLBD3wS7XhdT98g5J5GR13NOTQ9/6Q11Zo= +github.com/go-openapi/swag/jsonutils v0.25.1 h1:AihLHaD0brrkJoMqEZOBNzTLnk81Kg9cWr+SPtxtgl8= +github.com/go-openapi/swag/jsonutils v0.25.1/go.mod h1:JpEkAjxQXpiaHmRO04N1zE4qbUEg3b7Udll7AMGTNOo= +github.com/go-openapi/swag/jsonutils/fixtures_test v0.25.1 h1:DSQGcdB6G0N9c/KhtpYc71PzzGEIc/fZ1no35x4/XBY= +github.com/go-openapi/swag/jsonutils/fixtures_test v0.25.1/go.mod h1:kjmweouyPwRUEYMSrbAidoLMGeJ5p6zdHi9BgZiqmsg= 
+github.com/go-openapi/swag/loading v0.25.1 h1:6OruqzjWoJyanZOim58iG2vj934TysYVptyaoXS24kw= +github.com/go-openapi/swag/loading v0.25.1/go.mod h1:xoIe2EG32NOYYbqxvXgPzne989bWvSNoWoyQVWEZicc= +github.com/go-openapi/swag/mangling v0.25.1 h1:XzILnLzhZPZNtmxKaz/2xIGPQsBsvmCjrJOWGNz/ync= +github.com/go-openapi/swag/mangling v0.25.1/go.mod h1:CdiMQ6pnfAgyQGSOIYnZkXvqhnnwOn997uXZMAd/7mQ= +github.com/go-openapi/swag/netutils v0.25.1 h1:2wFLYahe40tDUHfKT1GRC4rfa5T1B4GWZ+msEFA4Fl4= +github.com/go-openapi/swag/netutils v0.25.1/go.mod h1:CAkkvqnUJX8NV96tNhEQvKz8SQo2KF0f7LleiJwIeRE= +github.com/go-openapi/swag/stringutils v0.25.1 h1:Xasqgjvk30eUe8VKdmyzKtjkVjeiXx1Iz0zDfMNpPbw= +github.com/go-openapi/swag/stringutils v0.25.1/go.mod h1:JLdSAq5169HaiDUbTvArA2yQxmgn4D6h4A+4HqVvAYg= +github.com/go-openapi/swag/typeutils v0.25.1 h1:rD/9HsEQieewNt6/k+JBwkxuAHktFtH3I3ysiFZqukA= +github.com/go-openapi/swag/typeutils v0.25.1/go.mod h1:9McMC/oCdS4BKwk2shEB7x17P6HmMmA6dQRtAkSnNb8= +github.com/go-openapi/swag/yamlutils v0.25.1 h1:mry5ez8joJwzvMbaTGLhw8pXUnhDK91oSJLDPF1bmGk= +github.com/go-openapi/swag/yamlutils v0.25.1/go.mod h1:cm9ywbzncy3y6uPm/97ysW8+wZ09qsks+9RS8fLWKqg= +github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= +github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= +github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= +github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= +github.com/go-playground/validator/v10 v10.11.2 h1:q3SHpufmypg+erIExEKUmsgmhDTyhcJ38oeKGACXohU= +github.com/go-playground/validator/v10 v10.11.2/go.mod h1:NieE624vt4SCTJtD87arVLvdmjPAeV8BQlHtMnw9D7s= +github.com/go-resty/resty/v2 v2.16.5 
h1:hBKqmWrr7uRc3euHVqmh1HTHcKn99Smr7o5spptdhTM= +github.com/go-resty/resty/v2 v2.16.5/go.mod h1:hkJtXbA2iKHzJheXYvQ8snQES5ZLGKMwQ07xAwp/fiA= +github.com/go-sql-driver/mysql v1.3.0 h1:pgwjLi/dvffoP9aabwkT3AKpXQM93QARkjFhDDqC1UE= +github.com/go-sql-driver/mysql v1.3.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI= +github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8= +github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs= +github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= +github.com/gofrs/flock v0.13.0 h1:95JolYOvGMqeH31+FC7D2+uULf6mG61mEZ/A8dRYMzw= +github.com/gofrs/flock v0.13.0/go.mod h1:jxeyy9R1auM5S6JYDBhDt+E2TCo7DkratH4Pgi8P+Z0= +github.com/gogo/protobuf v1.0.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo= +github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE= +github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod 
h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/google/certificate-transparency-go v1.0.10-0.20180222191210-5ab67e519c93 h1:jc2UWq7CbdszqeH6qu1ougXMIUBfSy8Pbh/anURYbGI= +github.com/google/certificate-transparency-go v1.0.10-0.20180222191210-5ab67e519c93/go.mod h1:QeJfpSbVSfYc7RgB3gJFj9cbuQMMchQxrWXz8Ruopmg= +github.com/google/gnostic-models v0.7.0 h1:qwTtogB15McXDaNqTZdzPJRHvaVJlAl+HVQnLmJEJxo= +github.com/google/gnostic-models v0.7.0/go.mod h1:whL5G0m6dmc5cPxKc5bdKdEN3UjI7OUGxBlw57miDrQ= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/pprof v0.0.0-20250403155104-27863c87afa6 h1:BHT72Gu3keYf3ZEu2J0b1vyeLSOYI8bm5wbJM/8yDe8= +github.com/google/pprof v0.0.0-20250403155104-27863c87afa6/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA= +github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4= +github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/gorilla/mux v1.7.0/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= 
+github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= +github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= +github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674 h1:JeSE6pjso5THxAzdVpqr6/geYxZytqFMBCOtn/ujyeo= +github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674/go.mod h1:r4w70xmWCQKmi1ONH4KIaBptdivuRPyosB9RmPlGEwA= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.3 h1:NmZ1PKzSTQbuGHw9DGPFomqkkLWMC+vZCkfs+FHv1Vg= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.3/go.mod h1:zQrxl1YP88HQlA6i9c63DSVPFklWpGX4OWAc9bFuaH4= +github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed h1:5upAirOpQc1Q53c0bnx2ufif5kANL7bfZWcc6VJWJd8= +github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed/go.mod h1:tMWxXQ9wFIaZeTI9F+hmhFiGpFmhOHzyShyFUhRm0H4= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= +github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= +github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= +github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY= +github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/in-toto/in-toto-golang v0.9.0 h1:tHny7ac4KgtsfrG6ybU8gVOZux2H8jN05AXJ9EBM1XU= +github.com/in-toto/in-toto-golang v0.9.0/go.mod h1:xsBVrVsHNsB61++S6Dy2vWosKhuA3lUTQd+eF9HdeMo= +github.com/inconshreveable/mousetrap v1.0.0/go.mod 
h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/inhies/go-bytesize v0.0.0-20220417184213-4913239db9cf h1:FtEj8sfIcaaBfAKrE1Cwb61YDtYq9JxChK1c7AKce7s= +github.com/inhies/go-bytesize v0.0.0-20220417184213-4913239db9cf/go.mod h1:yrqSXGoD/4EKfF26AOGzscPOgTTJcyAwM2rpixWT+t4= +github.com/jinzhu/gorm v0.0.0-20170222002820-5409931a1bb8 h1:CZkYfurY6KGhVtlalI4QwQ6T0Cu6iuY3e0x5RLu96WE= +github.com/jinzhu/gorm v0.0.0-20170222002820-5409931a1bb8/go.mod h1:Vla75njaFJ8clLU1W44h34PjIkijhjHIYnZxMqCdxqo= +github.com/jinzhu/inflection v0.0.0-20170102125226-1c35d901db3d h1:jRQLvyVGL+iVtDElaEIDdKwpPqUIZJfzkNLV34htpEc= +github.com/jinzhu/inflection v0.0.0-20170102125226-1c35d901db3d/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= +github.com/jinzhu/now v1.1.1/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= +github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= +github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= +github.com/jonboulle/clockwork v0.5.0 h1:Hyh9A8u51kptdkR+cqRpT1EebBwTn1oK9YfGYbdFz6I= +github.com/jonboulle/clockwork v0.5.0/go.mod h1:3mZlmanh0g2NDKO5TWZVJAfofYk64M7XN3SzBPjZF60= +github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/juju/loggo v0.0.0-20190526231331-6e530bcce5d8/go.mod h1:vgyd7OREkbtVEN/8IXZe5Ooef3LQePvuBm9UWj6ZL8U= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/kisielk/errcheck v1.5.0/go.mod 
h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.18.1 h1:bcSGx7UbpBqMChDtsF28Lw6v/G94LPrrbMbdC3JH2co= +github.com/klauspost/compress v1.18.1/go.mod h1:ZQFFVG+MdnR0P+l6wpXgIL4NTtwiKIdBnrBd8Nrxr+0= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/fs v0.1.0 h1:Jskdu9ieNAYnjxsi0LbQp1ulIKZV1LAFgK1tWhpZgl8= +github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/leodido/go-urn v1.2.2 h1:7z68G0FCGvDk646jz1AelTYNYWrTNm0bEcFAo147wt4= +github.com/leodido/go-urn v1.2.2/go.mod h1:kUaIbLZWttglzwNuG0pgsh5vuV6u2YcGBYz1hIPjtOQ= +github.com/lib/pq v0.0.0-20150723085316-0dad96c0b94f/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lufia/plan9stats v0.0.0-20251013123823-9fd1530e3ec3 h1:PwQumkgq4/acIiZhtifTV5OUqqiP82UAl0h87xj/l9k= +github.com/lufia/plan9stats 
v0.0.0-20251013123823-9fd1530e3ec3/go.mod h1:autxFIvghDt3jPTLoqZ9OZ7s9qTGNAWmYCjVFWPX/zg= +github.com/magiconair/properties v1.5.3/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= +github.com/magiconair/properties v1.8.10 h1:s31yESBquKXCV9a/ScB3ESkOjUYYv+X0rg8SYxI99mE= +github.com/magiconair/properties v1.8.10/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= +github.com/mattn/go-runewidth v0.0.19 h1:v++JhqYnZuu5jSKrk9RbgF5v4CGUjqRfBm05byFGLdw= +github.com/mattn/go-runewidth v0.0.19/go.mod h1:XBkDxAl56ILZc9knddidhrOlY5R/pDhgLpndooCuJAs= +github.com/mattn/go-shellwords v1.0.12 h1:M2zGm7EW6UQJvDeQxo4T51eKPurbeFbe8WtebGE2xrk= +github.com/mattn/go-shellwords v1.0.12/go.mod h1:EZzvwXDESEeg03EKmM+RmDnNOPKG4lLtQsUlTZDWQ8Y= +github.com/mattn/go-sqlite3 v1.6.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/miekg/pkcs11 v1.0.2/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= +github.com/miekg/pkcs11 v1.1.1 h1:Ugu9pdy6vAYku5DEpVWVFPYnzV+bxB+iRdbuFSu7TvU= +github.com/miekg/pkcs11 v1.1.1/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= +github.com/mitchellh/hashstructure/v2 v2.0.2 h1:vGKWl0YJqUNxE8d+h8f6NJLcCJrgbhC4NcD46KavDd4= +github.com/mitchellh/hashstructure/v2 v2.0.2/go.mod h1:MG3aRVU/N29oo/V/IhBX8GR/zz4kQkprJgF2EVszyDE= +github.com/mitchellh/mapstructure v0.0.0-20150613213606-2caf8efc9366/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/moby/buildkit v0.25.1 h1:j7IlVkeNbEo+ZLoxdudYCHpmTsbwKvhgc/6UJ/mY/o8= +github.com/moby/buildkit v0.25.1/go.mod h1:phM8sdqnvgK2y1dPDnbwI6veUCXHOZ6KFSl6E164tkc= +github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= +github.com/moby/docker-image-spec 
v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= +github.com/moby/go-archive v0.1.0 h1:Kk/5rdW/g+H8NHdJW2gsXyZ7UnzvJNOy6VKJqueWdcQ= +github.com/moby/go-archive v0.1.0/go.mod h1:G9B+YoujNohJmrIYFBpSd54GTUB4lt9S+xVQvsJyFuo= +github.com/moby/locker v1.0.1 h1:fOXqR41zeveg4fFODix+1Ch4mj/gT0NE1XJbp/epuBg= +github.com/moby/locker v1.0.1/go.mod h1:S7SDdo5zpBK84bzzVlKr2V0hz+7x9hWbYC/kq7oQppc= +github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk= +github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc= +github.com/moby/spdystream v0.5.0 h1:7r0J1Si3QO/kjRitvSLVVFUjxMEb/YLj6S9FF62JBCU= +github.com/moby/spdystream v0.5.0/go.mod h1:xBAYlnt/ay+11ShkdFKNAG7LsyK/tmNBVvVOwrfMgdI= +github.com/moby/sys/atomicwriter v0.1.0 h1:kw5D/EqkBwsBFi0ss9v1VG3wIkVhzGvLklJ+w3A14Sw= +github.com/moby/sys/atomicwriter v0.1.0/go.mod h1:Ul8oqv2ZMNHOceF643P6FKPXeCmYtlQMvpizfsSoaWs= +github.com/moby/sys/capability v0.4.0 h1:4D4mI6KlNtWMCM1Z/K0i7RV1FkX+DBDHKVJpCndZoHk= +github.com/moby/sys/capability v0.4.0/go.mod h1:4g9IK291rVkms3LKCDOoYlnV8xKwoDTpIrNEE35Wq0I= +github.com/moby/sys/mountinfo v0.7.2 h1:1shs6aH5s4o5H2zQLn796ADW1wMrIwHsyJ2v9KouLrg= +github.com/moby/sys/mountinfo v0.7.2/go.mod h1:1YOa8w8Ih7uW0wALDUgT1dTTSBrZ+HiBLGws92L2RU4= +github.com/moby/sys/sequential v0.6.0 h1:qrx7XFUd/5DxtqcoH1h438hF5TmOvzC/lspjy7zgvCU= +github.com/moby/sys/sequential v0.6.0/go.mod h1:uyv8EUTrca5PnDsdMGXhZe6CCe8U/UiTWd+lL+7b/Ko= +github.com/moby/sys/signal v0.7.1 h1:PrQxdvxcGijdo6UXXo/lU/TvHUWyPhj7UOpSo8tuvk0= +github.com/moby/sys/signal v0.7.1/go.mod h1:Se1VGehYokAkrSQwL4tDzHvETwUZlnY7S5XtQ50mQp8= +github.com/moby/sys/symlink v0.3.0 h1:GZX89mEZ9u53f97npBy4Rc3vJKj7JBDj/PN2I22GrNU= +github.com/moby/sys/symlink v0.3.0/go.mod h1:3eNdhduHmYPcgsJtZXW1W4XUJdZGBIkttZ8xKqPUJq0= +github.com/moby/sys/user v0.4.0 h1:jhcMKit7SA80hivmFJcbB1vqmw//wU61Zdui2eQXuMs= +github.com/moby/sys/user v0.4.0/go.mod h1:bG+tYYYJgaMtRKgEmuueC0hJEAZWwtIbZTB+85uoHjs= 
+github.com/moby/sys/userns v0.1.0 h1:tVLXkFOxVu9A64/yh59slHVv9ahO9UIev4JZusOLG/g= +github.com/moby/sys/userns v0.1.0/go.mod h1:IHUYgu/kao6N8YZlp9Cf444ySSvCmDlmzUcYfDHOl28= +github.com/moby/term v0.5.2 h1:6qk3FJAFDs6i/q3W/pQ97SX192qKfZgGjCQqfCJkgzQ= +github.com/moby/term v0.5.2/go.mod h1:d3djjFCrjnB+fl8NJux+EJzu0msscUP+f8it8hPkFLc= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/modern-go/reflect2 v1.0.3-0.20250322232337-35a7c28c31ee h1:W5t00kpgFdJifH4BDsTlE89Zl93FEloxaWZfGcifgq8= +github.com/modern-go/reflect2 v1.0.3-0.20250322232337-35a7c28c31ee/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= +github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f h1:y5//uYreIhSUg3J1GEMiLbxo1LJaP8RfCpH6pymGZus= +github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+o7JKHSa8/e818NopupXU1YMK5fe1lsApnBw= 
+github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= +github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.12.0 h1:Iw5WCbBcaAAd0fpRb1c9r5YCylv4XDoCSigm1zLevwU= +github.com/onsi/ginkgo v1.12.0/go.mod h1:oUhWkIvk5aDxtKvDDuw8gItl8pKl42LzjC9KZE0HfGg= +github.com/onsi/ginkgo/v2 v2.21.0 h1:7rg/4f3rB88pb5obDgNZrNHrQ4e6WpjonchcpuBRnZM= +github.com/onsi/ginkgo/v2 v2.21.0/go.mod h1:7Du3c42kxCUegi0IImZ1wUQzMBVecgIHjR1C+NkhLQo= +github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= +github.com/onsi/gomega v1.9.0/go.mod h1:Ho0h+IUsWyvy1OpqCwxlQ/21gkhVunqlU8fDGcoTdcA= +github.com/onsi/gomega v1.35.1 h1:Cwbd75ZBPxFSuZ6T+rN/WCb/gOc6YgFBXLlZLhC7Ds4= +github.com/onsi/gomega v1.35.1/go.mod h1:PvZbdDc8J6XJEpDK4HCuRBm8a6Fzp9/DmhC9C7yFlog= +github.com/opencontainers/go-digest v0.0.0-20170106003457-a6d0ee40d420/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= +github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= +github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.0.1/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= +github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040= +github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M= +github.com/opencontainers/runtime-spec v1.2.1 h1:S4k4ryNgEpxW1dzyqffOmhI1BHYcjzU8lpJfSlR0xww= +github.com/opencontainers/runtime-spec v1.2.1/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= +github.com/opencontainers/selinux v1.12.0 h1:6n5JV4Cf+4y0KNXW48TLj5DwfXpvWlxXplUkdTrmPb8= +github.com/opencontainers/selinux v1.12.0/go.mod h1:BTPX+bjVbWGXw7ZZWUbdENt8w0htPSrlgOOysQaU62U= +github.com/opentracing/opentracing-go v1.1.0 h1:pWlfV3Bxv7k65HYwkikxat0+s3pV4bsqf19k25Ur8rU= 
+github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= +github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= +github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/sftp v1.13.9 h1:4NGkvGudBL7GteO3m6qnaQ4pC0Kvf0onSVc9gR3EWBw= +github.com/pkg/sftp v1.13.9/go.mod h1:OBN7bVXdstkFFN/gdnHPUb5TE8eb8G1Rp9wCItqjkkA= +github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo= +github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 h1:o4JXh1EVt9k/+g42oCprj/FisM4qX9L3sZB3upGN2ZU= +github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= +github.com/prometheus/client_golang v0.9.0-pre1.0.20180209125602-c332b6f63c06/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.1.0/go.mod 
h1:I1FGZT9+L76gKKOs5djB6ezCbFQP1xR9D75/vuwEF3g= +github.com/prometheus/client_golang v1.23.2 h1:Je96obch5RDVy3FDMndoUsjAhG5Edi49h0RJWRi/o0o= +github.com/prometheus/client_golang v1.23.2/go.mod h1:Tb1a6LWHB3/SPIzCoaDXI4I8UHKeFTEQ1YCr+0Gyqmg= +github.com/prometheus/client_model v0.0.0-20171117100541-99fa1f4be8e5/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk= +github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE= +github.com/prometheus/common v0.0.0-20180110214958-89604d197083/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.6.0/go.mod h1:eBmuwkDJBwy6iBfxCBob6t6dR6ENT/y+J+Zk0j9GMYc= +github.com/prometheus/common v0.67.2 h1:PcBAckGFTIHt2+L3I33uNRTlKTplNzFctXcWhPyAEN8= +github.com/prometheus/common v0.67.2/go.mod h1:63W3KZb1JOKgcjlIr64WW/LvFGAqKPj0atm+knVGEko= +github.com/prometheus/procfs v0.0.0-20180125133057-cb4147076ac7/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.0.3/go.mod h1:4A/X28fw3Fc593LaREMrKMqOKvUAntwMDaekg4FpcdQ= +github.com/prometheus/procfs v0.19.1 h1:QVtROpTkphuXuNlnCv3m1ut3JytkXHtQ3xvck/YmzMM= +github.com/prometheus/procfs v0.19.1/go.mod h1:M0aotyiemPhBCM0z5w87kL22CxfcH05ZpYlu+b4J7mw= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= 
+github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/rwtodd/Go.Sed v0.0.0-20210816025313-55464686f9ef/go.mod h1:8AEUvGVi2uQ5b24BIhcr0GCcpd/RNAFWaN2CJFrWIIQ= +github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 h1:KRzFb2m7YtdldCEkzs6KqmJw4nqEVZGK7IN2kJkjTuQ= +github.com/santhosh-tekuri/jsonschema/v6 v6.0.2/go.mod h1:JXeL+ps8p7/KNMjDQk3TCwPpBy0wYklyWTfbkIzdIFU= +github.com/secure-systems-lab/go-securesystemslib v0.9.1 h1:nZZaNz4DiERIQguNy0cL5qTdn9lR8XKHf4RUyG1Sx3g= +github.com/secure-systems-lab/go-securesystemslib v0.9.1/go.mod h1:np53YzT0zXGMv6x4iEWc9Z59uR+x+ndLwCLqPYpLXVU= +github.com/serialx/hashring v0.0.0-20200727003509-22c0c7ab6b1b h1:h+3JX2VoWTFuyQEo87pStk/a99dzIO1mM9KxIyLPGTU= +github.com/serialx/hashring v0.0.0-20200727003509-22c0c7ab6b1b/go.mod h1:/yeG0My1xr/u+HZrFQ1tOQQQQrOawfyMUH13ai5brBc= +github.com/shellhub-io/shellhub v0.20.1 h1:5txMp3E2LG/fHxPuiLg6208pIj02EDWsxr43TGgGr+g= +github.com/shellhub-io/shellhub v0.20.1/go.mod h1:/a58sSNTGpJhRTGDFjdIW6XFRbVX0qWJIPA2NOx7agg= +github.com/shibumi/go-pathspec v1.3.0 h1:QUyMZhFo0Md5B8zV8x2tesohbb5kfbpTi9rBnKh5dkI= +github.com/shibumi/go-pathspec v1.3.0/go.mod h1:Xutfslp817l2I1cZvgcfeMQJG5QnU2lh5tVaaMCl3jE= +github.com/shirou/gopsutil/v4 v4.25.9 h1:JImNpf6gCVhKgZhtaAHJ0serfFGtlfIlSC08eaKdTrU= +github.com/shirou/gopsutil/v4 v4.25.9/go.mod h1:gxIxoC+7nQRwUl/xNhutXlD8lq+jxTgpIkEf3rADHL8= +github.com/sirupsen/logrus v1.0.6/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/skratchdot/open-golang 
v0.0.0-20200116055534-eef842397966 h1:JIAuq3EEf9cgbU6AtGPK4CTG3Zf6CKMNqf0MHTggAUA= +github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966/go.mod h1:sUM3LWHvSMaG192sy56D9F7CNvL7jUJVXoqM1QKLnog= +github.com/spdx/tools-golang v0.5.5 h1:61c0KLfAcNqAjlg6UNMdkwpMernhw3zVRwDZ2x9XOmk= +github.com/spdx/tools-golang v0.5.5/go.mod h1:MVIsXx8ZZzaRWNQpUDhC4Dud34edUYJYecciXgrw5vE= +github.com/spf13/cast v0.0.0-20150508191742-4d07383ffe94/go.mod h1:r2rcYCSwa1IExKTDiTfzaxqT2FNHs8hODu4LnUfgKEg= +github.com/spf13/cast v1.3.1 h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng= +github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/cobra v0.0.1/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= +github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s= +github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0= +github.com/spf13/jwalterweatherman v0.0.0-20141219030609-3d60171a6431 h1:XTHrT015sxHyJ5FnQ0AeemSspZWaDq7DoTRW0EVsDCE= +github.com/spf13/jwalterweatherman v0.0.0-20141219030609-3d60171a6431/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= +github.com/spf13/pflag v1.0.0/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk= +github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v0.0.0-20150530192845-be5ff3e4840c h1:2EejZtjFjKJGk71ANb+wtFK5EjUzUkEM3R0xnp559xg= +github.com/spf13/viper v0.0.0-20150530192845-be5ff3e4840c/go.mod h1:A8kyI5cUJhb8N+3pkfONlcEcZbueH6nhAm0Fq7SrnBM= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= 
+github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/testcontainers/testcontainers-go v0.39.0 h1:uCUJ5tA+fcxbFAB0uP3pIK3EJ2IjjDUHFSZ1H1UxAts= +github.com/testcontainers/testcontainers-go v0.39.0/go.mod h1:qmHpkG7H5uPf/EvOORKvS6EuDkBUPE3zpVGaH9NL7f8= +github.com/testcontainers/testcontainers-go/modules/compose v0.39.1 h1:/3kEZY3xH/ibgnUGAACmEcqVIqfXzqD1LdXnJqfsBrM= +github.com/testcontainers/testcontainers-go/modules/compose v0.39.1/go.mod h1:qxH8QmljpneFWkGJ7RzjPOnoyRN760OZdq3bE0aG/bg= +github.com/theupdateframework/notary v0.7.0 h1:QyagRZ7wlSpjT5N2qQAh/pN+DVqgekv4DzbAiAiEL3c= +github.com/theupdateframework/notary v0.7.0/go.mod h1:c9DRxcmhHmVLDay4/2fUYdISnHqbFDGRSlXPO0AhYWw= +github.com/tilt-dev/fsnotify v1.4.8-0.20220602155310-fff9c274a375 
h1:QB54BJwA6x8QU9nHY3xJSZR2kX9bgpZekRKGkLTmEXA= +github.com/tilt-dev/fsnotify v1.4.8-0.20220602155310-fff9c274a375/go.mod h1:xRroudyp5iVtxKqZCrA6n2TLFRBf8bmnjr1UD4x+z7g= +github.com/tklauser/go-sysconf v0.3.15 h1:VE89k0criAymJ/Os65CSn1IXaol+1wrsFHEB8Ol49K4= +github.com/tklauser/go-sysconf v0.3.15/go.mod h1:Dmjwr6tYFIseJw7a3dRLJfsHAMXZ3nEnL/aZY+0IuI4= +github.com/tklauser/numcpus v0.10.0 h1:18njr6LDBk1zuna922MgdjQuJFjrdppsZG60sHGfjso= +github.com/tklauser/numcpus v0.10.0/go.mod h1:BiTKazU708GQTYF4mB+cmlpT2Is1gLk7XVuEeem8LsQ= +github.com/tonistiigi/dchapes-mode v0.0.0-20250318174251-73d941a28323 h1:r0p7fK56l8WPequOaR3i9LBqfPtEdXIQbUTzT55iqT4= +github.com/tonistiigi/dchapes-mode v0.0.0-20250318174251-73d941a28323/go.mod h1:3Iuxbr0P7D3zUzBMAZB+ois3h/et0shEz0qApgHYGpY= +github.com/tonistiigi/fsutil v0.0.0-20250605211040-586307ad452f h1:MoxeMfHAe5Qj/ySSBfL8A7l1V+hxuluj8owsIEEZipI= +github.com/tonistiigi/fsutil v0.0.0-20250605211040-586307ad452f/go.mod h1:BKdcez7BiVtBvIcef90ZPc6ebqIWr4JWD7+EvLm6J98= +github.com/tonistiigi/go-csvvalue v0.0.0-20240814133006-030d3b2625d0 h1:2f304B10LaZdB8kkVEaoXvAMVan2tl9AiK4G0odjQtE= +github.com/tonistiigi/go-csvvalue v0.0.0-20240814133006-030d3b2625d0/go.mod h1:278M4p8WsNh3n4a1eqiFcV2FGk7wE5fwUpUom9mK9lE= +github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea h1:SXhTLE6pb6eld/v/cCndK0AMpt1wiVFb/YYmqB3/QG0= +github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea/go.mod h1:WPnis/6cRcDZSUvVmezrxJPkiO87ThFYsoUiMwWNDJk= +github.com/tonistiigi/vt100 v0.0.0-20240514184818-90bafcd6abab h1:H6aJ0yKQ0gF49Qb2z5hI1UHxSQt4JMyxebFR15KnApw= +github.com/tonistiigi/vt100 v0.0.0-20240514184818-90bafcd6abab/go.mod h1:ulncasL3N9uLrVann0m+CDlJKWsIAP34MPcOJF6VRvc= +github.com/vbatts/tar-split v0.12.1 h1:CqKoORW7BUWBe7UL/iqTVvkTBOF8UvOMKOIZykxnnbo= +github.com/vbatts/tar-split v0.12.1/go.mod h1:eF6B6i6ftWQcDqEn3/iGFRFRo8cBIMSJVOpnNdfTMFA= +github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM= +github.com/x448/float16 
v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg= +github.com/xhit/go-str2duration/v2 v2.1.0 h1:lxklc02Drh6ynqX+DdPyp5pCKLUQpRT8bp8Ydu2Bstc= +github.com/xhit/go-str2duration/v2 v2.1.0/go.mod h1:ohY8p+0f07DiV6Em5LKB0s2YpLtXVyJfNt1+BlmyAsU= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0= +github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= +github.com/zclconf/go-cty v1.17.0 h1:seZvECve6XX4tmnvRzWtJNHdscMtYEx5R7bnnVyd/d0= +github.com/zclconf/go-cty v1.17.0/go.mod h1:wqFzcImaLTI6A5HfsRwB0nj5n0MRZFwmey8YoFPPs3U= +go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= +go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= +go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64= +go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.63.0 h1:YH4g8lQroajqUwWbq/tr2QX1JFmEXaDLgG+ew9bLMWo= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.63.0/go.mod h1:fvPi2qXDqFs8M4B4fmJhE92TyQs9Ydjlg3RvfUp+NbQ= +go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.63.0 h1:2pn7OzMewmYRiNtv1doZnLo3gONcnMHlFnmOR8Vgt+8= +go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.63.0/go.mod h1:rjbQTDEPQymPE0YnRQp9/NuPwwtL0sesz/fnqRW/v84= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0 h1:RbKq8BG0FI8OiXhBfcRtqqHcZcka+gU3cskNuf05R18= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0/go.mod h1:h06DGIukJOevXaj/xrNjhi/2098RZzcLTbc0jDAUbsg= 
+go.opentelemetry.io/otel v1.38.0 h1:RkfdswUDRimDg0m2Az18RKOsnI8UDzppJAtj01/Ymk8= +go.opentelemetry.io/otel v1.38.0/go.mod h1:zcmtmQ1+YmQM9wrNsTGV/q/uyusom3P8RxwExxkZhjM= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.38.0 h1:vl9obrcoWVKp/lwl8tRE33853I8Xru9HFbw/skNeLs8= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.38.0/go.mod h1:GAXRxmLJcVM3u22IjTg74zWBrRCKq8BnOqUVLodpcpw= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.38.0 h1:Oe2z/BCg5q7k4iXC3cqJxKYg0ieRiOqF0cecFYdPTwk= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.38.0/go.mod h1:ZQM5lAJpOsKnYagGg/zV2krVqTtaVdYdDkhMoX6Oalg= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0 h1:GqRJVj7UmLjCVyVJ3ZFLdPRmhDUp2zFmQe3RHIOsw24= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0/go.mod h1:ri3aaHSmCTVYu2AWv44YMauwAQc0aqI9gHKIcSbI1pU= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.38.0 h1:lwI4Dc5leUqENgGuQImwLo4WnuXFPetmPpkLi2IrX54= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.38.0/go.mod h1:Kz/oCE7z5wuyhPxsXDuaPteSWqjSBD5YaSdbxZYGbGk= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.38.0 h1:aTL7F04bJHUlztTsNGJ2l+6he8c+y/b//eR0jjjemT4= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.38.0/go.mod h1:kldtb7jDTeol0l3ewcmd8SDvx3EmIE7lyvqbasU3QC4= +go.opentelemetry.io/otel/metric v1.38.0 h1:Kl6lzIYGAh5M159u9NgiRkmoMKjvbsKtYRwgfrA6WpA= +go.opentelemetry.io/otel/metric v1.38.0/go.mod h1:kB5n/QoRM8YwmUahxvI3bO34eVtQf2i4utNVLr9gEmI= +go.opentelemetry.io/otel/sdk v1.38.0 h1:l48sr5YbNf2hpCUj/FoGhW9yDkl+Ma+LrVl8qaM5b+E= +go.opentelemetry.io/otel/sdk v1.38.0/go.mod h1:ghmNdGlVemJI3+ZB5iDEuk4bWA3GkTpW+DOoZMYBVVg= +go.opentelemetry.io/otel/sdk/metric v1.38.0 h1:aSH66iL0aZqo//xXzQLYozmWrXxyFkBJ6qT5wthqPoM= +go.opentelemetry.io/otel/sdk/metric v1.38.0/go.mod h1:dg9PBnW9XdQ1Hd6ZnRz689CbtrUp0wMMs9iPcgT9EZA= +go.opentelemetry.io/otel/trace v1.38.0 
h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJrmcNLE= +go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs= +go.opentelemetry.io/proto/otlp v1.8.0 h1:fRAZQDcAFHySxpJ1TwlA1cJ4tvcrw7nXl9xWWC8N5CE= +go.opentelemetry.io/proto/otlp v1.8.0/go.mod h1:tIeYOeNBU4cvmPqpaji1P+KbB4Oloai8wN4rWzRrFF0= +go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= +go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= +go.uber.org/mock v0.6.0 h1:hyF9dfmbgIX5EfOdasqLsWD6xqpNZlXblLB/Dbnwv3Y= +go.uber.org/mock v0.6.0/go.mod h1:KiVJ4BqZJaMj4svdfmHM0AUx4NJYO8ZNpPnZn1Z+BBU= +go.yaml.in/yaml/v2 v2.4.3 h1:6gvOSjQoTB3vt1l+CU+tSyi/HOjfOjRLJ4YwYZGwRO0= +go.yaml.in/yaml/v2 v2.4.3/go.mod h1:zSxWcmIDjOzPXpjlTTbAsKokqkDNAVtZO0WOMiT90s8= +go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc= +go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190325154230-a5d413f7728c/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201117144127-c1f2f97bffc9/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= +golang.org/x/crypto v0.19.0/go.mod 
h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= +golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= +golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= +golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q= +golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4= +golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0 h1:R84qjqJb5nVJMxqWYb3np9L5ZsaDtB+a39EqjV0JSUM= +golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0/go.mod h1:S9Xr4PYopiDyqSyp5NjCrhFrqg6A5zA2E/iPHPhqnS8= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA= +golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w= +golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= 
+golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= +golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= +golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= +golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY= +golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU= +golang.org/x/oauth2 v0.32.0 h1:jsCblLleRMDrxMN29H3z/k1KliIvpLgCkE6R8FXXNgY= +golang.org/x/oauth2 v0.32.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sync 
v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I= +golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190801041406-cbf593c0f2f3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210331175145-43e1dd70ce54/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc= +golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= +golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= +golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= +golang.org/x/term v0.20.0/go.mod 
h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= +golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= +golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU= +golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= +golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM= +golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM= +golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI= +golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.13.0/go.mod 
h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= +golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= +golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ= +golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= +gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= +google.golang.org/genproto/googleapis/api v0.0.0-20251022142026-3a174f9686a8 h1:mepRgnBZa07I4TRuomDE4sTIYieg/osKmzIf4USdWS4= +google.golang.org/genproto/googleapis/api v0.0.0-20251022142026-3a174f9686a8/go.mod h1:fDMmzKV90WSg1NbozdqrE64fkuTv6mlq2zxo9ad+3yo= +google.golang.org/genproto/googleapis/rpc v0.0.0-20251022142026-3a174f9686a8 h1:M1rk8KBnUsBDg1oPGHNCxG4vc1f49epmTO7xscSajMk= +google.golang.org/genproto/googleapis/rpc v0.0.0-20251022142026-3a174f9686a8/go.mod h1:7i2o+ce6H/6BluujYR+kqX3GKH+dChPTQU19wjRPiGk= +google.golang.org/grpc v1.0.5/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= +google.golang.org/grpc v1.76.0 h1:UnVkv1+uMLYXoIz6o7chp59WfQUYA2ex/BXQ9rHZu7A= +google.golang.org/grpc v1.76.0/go.mod h1:Ju12QI8M6iQJtbcsV+awF5a4hfJMLi4X0JLo94ULZ6c= +google.golang.org/protobuf v1.36.10 h1:AYd7cD/uASjIL6Q9LiTjz8JLcrh/88q5UObnmY3aOOE= +google.golang.org/protobuf v1.36.10/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= +gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U= +gopkg.in/alecthomas/kingpin.v2 
v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= +gopkg.in/cenkalti/backoff.v2 v2.2.1 h1:eJ9UAg01/HIHG987TwxvnzK2MgxXq97YY6rYDpY9aII= +gopkg.in/cenkalti/backoff.v2 v2.2.1/go.mod h1:S0QdOvT2AlerfSBkp0O+dk+bbIMaNbEmVk876gPCthU= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/evanphx/json-patch.v4 v4.13.0 h1:czT3CmqEaQ1aanPc5SdlgQrrEIb8w/wwCvWWnfEbYzo= +gopkg.in/evanphx/json-patch.v4 v4.13.0/go.mod h1:p8EYWUEYMpynmqDbY58zCKCFZw8pRWMG4EsWvDvM72M= +gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= +gopkg.in/gemnasium/logrus-airbrake-hook.v2 v2.1.2/go.mod h1:Xk6kEKp8OKb+X14hQBKWaSkCsqBpgog8nAV2xsGOxlo= +gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= +gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= +gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= +gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/rethinkdb/rethinkdb-go.v6 v6.2.1 h1:d4KQkxAaAiRY2h5Zqis161Pv91A37uZyJOx73duwUwM= +gopkg.in/rethinkdb/rethinkdb-go.v6 v6.2.1/go.mod h1:WbjuEoo1oadwzQ4apSDU+JTvmllEHtsNHS6y7vFc7iw= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10= +gopkg.in/yaml.v2 v2.2.8/go.mod 
h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gotest.tools/v3 v3.5.2 h1:7koQfIKdy+I8UTetycgUqXWSDwpgv193Ka+qRsmBY8Q= +gotest.tools/v3 v3.5.2/go.mod h1:LtdLGcnqToBH83WByAAi/wiwSFCArdFIUV/xxN4pcjA= +k8s.io/api v0.34.1 h1:jC+153630BMdlFukegoEL8E/yT7aLyQkIVuwhmwDgJM= +k8s.io/api v0.34.1/go.mod h1:SB80FxFtXn5/gwzCoN6QCtPD7Vbu5w2n1S0J5gFfTYk= +k8s.io/apimachinery v0.34.1 h1:dTlxFls/eikpJxmAC7MVE8oOeP1zryV7iRyIjB0gky4= +k8s.io/apimachinery v0.34.1/go.mod h1:/GwIlEcWuTX9zKIg2mbw0LRFIsXwrfoVxn+ef0X13lw= +k8s.io/client-go v0.34.1 h1:ZUPJKgXsnKwVwmKKdPfw4tB58+7/Ik3CrjOEhsiZ7mY= +k8s.io/client-go v0.34.1/go.mod h1:kA8v0FP+tk6sZA0yKLRG67LWjqufAoSHA2xVGKw9Of8= +k8s.io/klog/v2 v2.130.1 h1:n9Xl7H1Xvksem4KFG4PYbdQCQxqc/tTUyrgXaOhHSzk= +k8s.io/klog/v2 v2.130.1/go.mod h1:3Jpz1GvMt720eyJH1ckRHK1EDfpxISzJ7I9OYgaDtPE= +k8s.io/kube-openapi v0.0.0-20250910181357-589584f1c912 h1:Y3gxNAuB0OBLImH611+UDZcmKS3g6CthxToOb37KgwE= +k8s.io/kube-openapi v0.0.0-20250910181357-589584f1c912/go.mod h1:kdmbQkyfwUagLfXIad1y2TdrjPFWp2Q89B3qkRwf/pQ= +k8s.io/utils v0.0.0-20251002143259-bc988d571ff4 h1:SjGebBtkBqHFOli+05xYbK8YF1Dzkbzn+gDM4X9T4Ck= +k8s.io/utils v0.0.0-20251002143259-bc988d571ff4/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= +sigs.k8s.io/json v0.0.0-20250730193827-2d320260d730 h1:IpInykpT6ceI+QxKBbEflcR5EXP7sU1kvOlxwZh5txg= +sigs.k8s.io/json v0.0.0-20250730193827-2d320260d730/go.mod h1:mdzfpAEoE6DHQEN0uh9ZbOCuHbLK5wOm7dK4ctXE9Tg= +sigs.k8s.io/randfill v1.0.0 h1:JfjMILfT8A6RbawdsK2JXGBR5AQVfd+9TbzrlneTyrU= +sigs.k8s.io/randfill v1.0.0/go.mod h1:XeLlZ/jmk4i1HRopwe7/aU3H5n1zNUcX6TM94b3QxOY= +sigs.k8s.io/structured-merge-diff/v6 v6.3.0 h1:jTijUJbW353oVOd9oTlifJqOGEkUw2jB/fXCbTiQEco= +sigs.k8s.io/structured-merge-diff/v6 
v6.3.0/go.mod h1:M3W8sfWvn2HhQDIbGWj3S099YozAsymCo/wrT5ohRUE= +sigs.k8s.io/yaml v1.6.0 h1:G8fkbMSAFqgEFgh4b1wmtzDnioxFCUgTZhlbj5P9QYs= +sigs.k8s.io/yaml v1.6.0/go.mod h1:796bPqUfzR/0jLAl6XjHl3Ck7MiyVv8dbTdyT3/pMf4= +tags.cncf.io/container-device-interface v1.0.1 h1:KqQDr4vIlxwfYh0Ed/uJGVgX+CHAkahrgabg6Q8GYxc= +tags.cncf.io/container-device-interface v1.0.1/go.mod h1:JojJIOeW3hNbcnOH2q0NrWNha/JuHoDZcmYxAZwb2i0= diff --git a/tests/main_test.go b/tests/main_test.go new file mode 100644 index 00000000000..c42171f9147 --- /dev/null +++ b/tests/main_test.go @@ -0,0 +1,95 @@ +package main + +import ( + "crypto/rand" + "crypto/rsa" + "crypto/x509" + "encoding/pem" + "os" + "testing" + + log "github.com/sirupsen/logrus" +) + +// keygen generates private and public keys required to startup a ShellHub instance. +func keygen() error { + const PrivateKeyPermission uint = 0o600 + const PublicKeyPermission uint = 0o644 + + const APIPrivatKeyPath string = "../api_private_key" + const APIPublicKeyPath string = "../api_public_key" + const SSHPrivateKey string = "../ssh_private_key" + + if _, err := os.Stat(SSHPrivateKey); os.IsNotExist(err) { + sshPrivateKey, err := rsa.GenerateKey(rand.Reader, 2048) + if err != nil { + return err + } + + bytesSSHPrivateKey, err := x509.MarshalPKCS8PrivateKey(sshPrivateKey) + if err != nil { + return err + } + + if err := os.WriteFile(SSHPrivateKey, pem.EncodeToMemory( + &pem.Block{ + Type: "PRIVATE KEY", + Bytes: bytesSSHPrivateKey, + }, + ), os.FileMode(PrivateKeyPermission)); err != nil { + return err + } + } + + if _, err := os.Stat(APIPrivatKeyPath); os.IsNotExist(err) { + apiPrivateKey, err := rsa.GenerateKey(rand.Reader, 2048) + if err != nil { + return err + } + + bytesAPIPrivateKey, err := x509.MarshalPKCS8PrivateKey(apiPrivateKey) + if err != nil { + return err + } + + if err := os.WriteFile(APIPrivatKeyPath, pem.EncodeToMemory( + &pem.Block{ + Type: "PRIVATE KEY", + Bytes: bytesAPIPrivateKey, + }, + ), os.FileMode(PrivateKeyPermission)); 
err != nil { + return err + } + + bytesAPIPublicKey, err := x509.MarshalPKIXPublicKey(&apiPrivateKey.PublicKey) + if err != nil { + return err + } + + if err := os.WriteFile(APIPublicKeyPath, pem.EncodeToMemory( + &pem.Block{ + Type: "PUBLIC KEY", + Bytes: bytesAPIPublicKey, + }, + ), os.FileMode(PublicKeyPermission)); err != nil { + return err + } + } + + return nil +} + +func TestMain(m *testing.M) { + // INFO: Due to issue related on testcontainers-go, we are disabling Ryuk it as a temporary solution. + // + // https://github.com/testcontainers/testcontainers-go/issues/2445 + os.Setenv("TESTCONTAINERS_RYUK_DISABLED", "true") + + if err := keygen(); err != nil { + log.WithError(err).Error("failed to generate the ShellHub keys") + + os.Exit(1) + } + + os.Exit(m.Run()) +} diff --git a/tests/ssh_test.go b/tests/ssh_test.go new file mode 100644 index 00000000000..0fd85e80cc8 --- /dev/null +++ b/tests/ssh_test.go @@ -0,0 +1,1431 @@ +package main + +import ( + "bufio" + "bytes" + "context" + "crypto/rand" + "crypto/rsa" + "fmt" + "io" + "log" + "net" + "os" + "strconv" + "strings" + "sync" + "testing" + "time" + + "github.com/bramvdbogaerde/go-scp" + "github.com/pkg/sftp" + "github.com/shellhub-io/shellhub/pkg/api/requests" + "github.com/shellhub-io/shellhub/pkg/models" + "github.com/shellhub-io/shellhub/tests/environment" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/testcontainers/testcontainers-go" + "golang.org/x/crypto/ssh" +) + +var ( + ShellHubAgentUsername = "root" + ShellHubAgentPassword = "password" +) + +const ( + ShellHubUsername = "test" + ShellHubPassword = "password" + ShellHubNamespaceName = "testspace" + ShellHubNamespace = "00000000-0000-4000-0000-000000000000" + ShellHubEmail = "test@ossystems.com.br" +) + +type NewAgentContainerOption func(envs map[string]string) + +func NewAgentContainerWithIdentity(identity string) NewAgentContainerOption { + return func(envs map[string]string) { + 
envs["SHELLHUB_PREFERRED_IDENTITY"] = identity + } +} + +func NewAgentContainerWithConnectionVersion(version int) NewAgentContainerOption { + return func(envs map[string]string) { + envs["SHELLHUB_TRANSPORT_VERSION"] = fmt.Sprintf("%d", version) + } +} + +func NewAgentContainer(ctx context.Context, port string, opts ...NewAgentContainerOption) (testcontainers.Container, error) { + envs := map[string]string{ + "SHELLHUB_SERVER_ADDRESS": fmt.Sprintf("http://localhost:%s", port), + "SHELLHUB_TENANT_ID": "00000000-0000-4000-0000-000000000000", + "SHELLHUB_PRIVATE_KEY": "/tmp/shellhub.key", + "SHELLHUB_LOG_FORMAT": "json", + "SHELLHUB_KEEPALIVE_INTERVAL": "1", + "SHELLHUB_LOG_LEVEL": "trace", + "SHELLHUB_TRANSPORT_VERSION": "1", // Default to v1 for compatibility + } + + for _, opt := range opts { + opt(envs) + } + + c, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: testcontainers.ContainerRequest{ + Env: envs, + NetworkMode: "host", + FromDockerfile: testcontainers.FromDockerfile{ + Repo: "agent", + Tag: "test", + Context: "..", + Dockerfile: "agent/Dockerfile.test", + PrintBuildLog: false, + KeepImage: false, + BuildArgs: map[string]*string{ + "USERNAME": &ShellHubAgentUsername, + "PASSWORD": &ShellHubAgentPassword, + }, + }, + }, + Logger: log.New(io.Discard, "", log.LstdFlags), + }) + if err != nil { + return nil, err + } + + return c, nil +} + +func TestSSH(t *testing.T) { + // Run all tests with both v1 and v2 + for _, version := range []int{1, 2} { + t.Run(fmt.Sprintf("connection_v%d", version), func(t *testing.T) { + testSSHWithVersion(t, version) + }) + } +} + +func testSSHWithVersion(t *testing.T, connectionVersion int) { + type Environment struct { + services *environment.DockerCompose + agent testcontainers.Container + } + + tests := []struct { + name string + options []NewAgentContainerOption + run func(*testing.T, *Environment, *models.Device) + }{ + { + name: "reconnect to server", + run: func(t 
*testing.T, environment *Environment, device *models.Device) { + ctx := context.Background() + + err := environment.agent.Stop(ctx, nil) + require.NoError(t, err) + + err = environment.agent.Start(ctx) + require.NoError(t, err) + + model := models.Device{} + + require.EventuallyWithT(t, func(tt *assert.CollectT) { + resp, err := environment.services.R(ctx). + SetResult(&model). + Get(fmt.Sprintf("/api/devices/%s", device.UID)) + assert.Equal(tt, 200, resp.StatusCode()) + assert.NoError(tt, err) + + assert.True(tt, model.Online) + }, 30*time.Second, 1*time.Second) + }, + }, + { + name: "reconnect to server with custom identity", + options: []NewAgentContainerOption{ + NewAgentContainerWithIdentity("test"), + }, + run: func(t *testing.T, environment *Environment, device *models.Device) { + ctx := context.Background() + + err := environment.agent.Stop(ctx, nil) + require.NoError(t, err) + + err = environment.agent.Start(ctx) + require.NoError(t, err) + + model := models.Device{} + + require.EventuallyWithT(t, func(tt *assert.CollectT) { + resp, err := environment.services.R(ctx). + SetResult(&model). 
+ Get(fmt.Sprintf("/api/devices/%s", device.UID)) + assert.Equal(tt, 200, resp.StatusCode()) + assert.NoError(tt, err) + + assert.True(tt, model.Online) + }, 30*time.Second, 1*time.Second) + }, + }, + { + name: "authenticate with password", + run: func(t *testing.T, environment *Environment, device *models.Device) { + config := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + ssh.Password(ShellHubAgentPassword), + }, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + } + + var conn *ssh.Client + + require.EventuallyWithT(t, func(tt *assert.CollectT) { + var err error + + conn, err = ssh.Dial("tcp", fmt.Sprintf("localhost:%s", environment.services.Env("SHELLHUB_SSH_PORT")), config) + assert.NoError(tt, err) + }, 30*time.Second, 1*time.Second) + + conn.Close() + }, + }, + { + name: "fail to authenticate with password", + run: func(t *testing.T, environment *Environment, device *models.Device) { + config := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + ssh.Password("wrongpassword"), + }, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + } + + _, err := ssh.Dial("tcp", fmt.Sprintf("localhost:%s", environment.services.Env("SHELLHUB_SSH_PORT")), config) + require.Error(t, err) + }, + }, + { + name: "authenticate with password with custom identity", + options: []NewAgentContainerOption{ + NewAgentContainerWithIdentity("test"), + }, + run: func(t *testing.T, environment *Environment, device *models.Device) { + config := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + ssh.Password(ShellHubAgentPassword), + }, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + } + + var conn *ssh.Client + + require.EventuallyWithT(t, func(tt *assert.CollectT) { + var err error + + 
conn, err = ssh.Dial("tcp", fmt.Sprintf("localhost:%s", environment.services.Env("SHELLHUB_SSH_PORT")), config) + assert.NoError(tt, err) + }, 30*time.Second, 1*time.Second) + + conn.Close() + }, + }, + { + name: "authenticate with public key", + run: func(t *testing.T, environment *Environment, device *models.Device) { + ctx := context.Background() + + privateKey, err := rsa.GenerateKey(rand.Reader, 2048) + require.NoError(t, err) + + publicKey, err := ssh.NewPublicKey(&privateKey.PublicKey) + require.NoError(t, err) + + model := requests.PublicKeyCreate{ + Name: ShellHubAgentUsername, + Username: ".*", + Data: ssh.MarshalAuthorizedKey(publicKey), + Filter: requests.PublicKeyFilter{ + Hostname: ".*", + }, + } + + resp, err := environment.services.R(ctx). + SetBody(&model). + Post("/api/sshkeys/public-keys") + require.Equal(t, 200, resp.StatusCode()) + require.NoError(t, err) + + signer, err := ssh.NewSignerFromKey(privateKey) + require.NoError(t, err) + + config := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + ssh.PublicKeys(signer), + }, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + } + + conn, err := ssh.Dial("tcp", fmt.Sprintf("localhost:%s", environment.services.Env("SHELLHUB_SSH_PORT")), config) + require.NoError(t, err) + + conn.Close() + }, + }, + { + name: "fail to authenticate with public key", + run: func(t *testing.T, environment *Environment, device *models.Device) { + privateKey, err := rsa.GenerateKey(rand.Reader, 2048) + require.NoError(t, err) + + signer, err := ssh.NewSignerFromKey(privateKey) + require.NoError(t, err) + + config := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + ssh.PublicKeys(signer), + }, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + } + + _, err = ssh.Dial("tcp", fmt.Sprintf("localhost:%s", 
environment.services.Env("SHELLHUB_SSH_PORT")), config) + require.Error(t, err) + }, + }, + /*{ + name: "connection keepalive when session is requested", + run: func(t *testing.T, environment *Environment, device *models.Device) { + config := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + ssh.Password(ShellHubAgentPassword), + }, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + } + + var globalConn ssh.Conn + + require.EventuallyWithT(t, func(tt *assert.CollectT) { + var err error + + dialed, err := net.DialTimeout("tcp", fmt.Sprintf("localhost:%s", environment.services.Env("SHELLHUB_SSH_PORT")), config.Timeout) + assert.NoError(tt, err) + + conn, _, _, err := ssh.NewClientConn(dialed, fmt.Sprintf("localhost:%s", environment.services.Env("SHELLHUB_SSH_PORT")), config) + assert.NoError(tt, err) + + globalConn = conn + }, 30*time.Second, 1*time.Second) + + ch, reqs, err := globalConn.OpenChannel("session", nil) + assert.NoError(t, err) + + ok, err := ch.SendRequest("shell", true, nil) + assert.True(t, ok) + assert.NoError(t, err) + + req := <-reqs + assert.True(t, strings.HasPrefix(req.Type, "keepalive")) + + ch.Close() + globalConn.Close() + }, + }*/ + { + name: "connection SHELL with Pty", + run: func(t *testing.T, environment *Environment, device *models.Device) { + config := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + ssh.Password(ShellHubAgentPassword), + }, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + } + + var conn *ssh.Client + + require.EventuallyWithT(t, func(tt *assert.CollectT) { + var err error + + conn, err = ssh.Dial("tcp", fmt.Sprintf("localhost:%s", environment.services.Env("SHELLHUB_SSH_PORT")), config) + assert.NoError(tt, err) + }, 30*time.Second, 1*time.Second) + + sess, err := conn.NewSession() + require.NoError(t, err) + + 
err = sess.RequestPty("xterm", 100, 100, ssh.TerminalModes{ + ssh.ECHO: 1, + ssh.TTY_OP_ISPEED: 14400, + ssh.TTY_OP_OSPEED: 14400, + }) + require.NoError(t, err) + + err = sess.Shell() + require.NoError(t, err) + + sess.Close() + conn.Close() + }, + }, + { + name: "connection EXEC and a SHELL on same connection", + run: func(t *testing.T, environment *Environment, device *models.Device) { + config := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + ssh.Password("password"), + }, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + } + + var conn *ssh.Client + + require.EventuallyWithT(t, func(tt *assert.CollectT) { + var err error + + conn, err = ssh.Dial("tcp", fmt.Sprintf("localhost:%s", environment.services.Env("SHELLHUB_SSH_PORT")), config) + assert.NoError(tt, err) + }, 30*time.Second, 1*time.Second) + + { + sess, err := conn.NewSession() + require.NoError(t, err) + + output, err := sess.Output(`echo -n "test"`) + require.NoError(t, err) + + assert.Equal(t, "test", string(output)) + + sess.Close() + } + { + sess, err := conn.NewSession() + require.NoError(t, err) + + err = sess.RequestPty("xterm", 100, 100, ssh.TerminalModes{ + ssh.ECHO: 1, + ssh.TTY_OP_ISPEED: 14400, + ssh.TTY_OP_OSPEED: 14400, + }) + require.NoError(t, err) + + err = sess.Shell() + require.NoError(t, err) + + sess.Close() + } + + conn.Close() + }, + }, + { + name: "connection EXEC", + run: func(t *testing.T, environment *Environment, device *models.Device) { + config := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + ssh.Password("password"), + }, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + } + + var conn *ssh.Client + + require.EventuallyWithT(t, func(tt *assert.CollectT) { + var err error + + conn, err = ssh.Dial("tcp", fmt.Sprintf("localhost:%s", 
environment.services.Env("SHELLHUB_SSH_PORT")), config) + assert.NoError(tt, err) + }, 30*time.Second, 1*time.Second) + + sess, err := conn.NewSession() + require.NoError(t, err) + + output, err := sess.Output(`echo -n "test"`) + require.NoError(t, err) + + assert.Equal(t, "test", string(output)) + + sess.Close() + conn.Close() + }, + }, + { + name: "connection EXEC with non zero status code", + run: func(t *testing.T, environment *Environment, device *models.Device) { + config := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + ssh.Password(ShellHubAgentPassword), + }, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + } + + var conn *ssh.Client + + require.EventuallyWithT(t, func(tt *assert.CollectT) { + var err error + + conn, err = ssh.Dial("tcp", fmt.Sprintf("localhost:%s", environment.services.Env("SHELLHUB_SSH_PORT")), config) + assert.NoError(tt, err) + }, 30*time.Second, 1*time.Second) + + sess, err := conn.NewSession() + require.NoError(t, err) + + var status *ssh.ExitError + + // NOTICE: write to stderr to simulate a error from connection. 
+ output, err := sess.CombinedOutput(`echo -n "test" 1>&2; exit 142`) + require.ErrorAs(t, err, &status) + + assert.Equal(t, 142, status.ExitStatus()) + assert.Equal(t, "test", string(output)) + + sess.Close() + conn.Close() + }, + }, + { + name: "connection EXEC with custom identity", + options: []NewAgentContainerOption{ + NewAgentContainerWithIdentity("test"), + }, + run: func(t *testing.T, environment *Environment, device *models.Device) { + config := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + ssh.Password(ShellHubAgentPassword), + }, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + } + + var conn *ssh.Client + + require.EventuallyWithT(t, func(tt *assert.CollectT) { + var err error + + conn, err = ssh.Dial("tcp", fmt.Sprintf("localhost:%s", environment.services.Env("SHELLHUB_SSH_PORT")), config) + assert.NoError(tt, err) + }, 30*time.Second, 1*time.Second) + + sess, err := conn.NewSession() + require.NoError(t, err) + + output, err := sess.Output(`echo -n "test"`) + require.NoError(t, err) + + assert.Equal(t, "test", string(output)) + + sess.Close() + conn.Close() + }, + }, + { + name: "connection SFTP to upload file", + options: []NewAgentContainerOption{}, + run: func(t *testing.T, environment *Environment, device *models.Device) { + config := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + ssh.Password(ShellHubAgentPassword), + }, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + } + + var conn *ssh.Client + + require.EventuallyWithT(t, func(tt *assert.CollectT) { + var err error + + conn, err = ssh.Dial("tcp", fmt.Sprintf("localhost:%s", environment.services.Env("SHELLHUB_SSH_PORT")), config) + assert.NoError(tt, err) + }, 30*time.Second, 1*time.Second) + + sess, err := sftp.NewClient(conn) + require.NoError(t, err) + + sent, err := 
sess.OpenFile("/tmp/sent", (os.O_WRONLY | os.O_CREATE | os.O_TRUNC)) + require.NoError(t, err) + + wrote, err := fmt.Fprintf(sent, "sent file content") + require.NoError(t, err) + + assert.Equal(t, 17, wrote) + + sess.Close() + conn.Close() + }, + }, + { + name: "connection SFTP to download file", + options: []NewAgentContainerOption{}, + run: func(t *testing.T, environment *Environment, device *models.Device) { + config := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + ssh.Password(ShellHubAgentPassword), + }, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + } + + var conn *ssh.Client + + require.EventuallyWithT(t, func(tt *assert.CollectT) { + var err error + + conn, err = ssh.Dial("tcp", fmt.Sprintf("localhost:%s", environment.services.Env("SHELLHUB_SSH_PORT")), config) + assert.NoError(tt, err) + }, 30*time.Second, 1*time.Second) + + sess, err := sftp.NewClient(conn) + require.NoError(t, err) + + received, err := sess.OpenFile("/etc/os-release", (os.O_RDONLY)) + require.NoError(t, err) + + var data string + + _, err = fmt.Fscanf(received, "%s", &data) + require.NoError(t, err) + + // NOTICE: This assertion brake if the Docker image used to build the Agent wasn't the Alpine. 
+ assert.Contains(t, data, "Alpine") + + sess.Close() + conn.Close() + }, + }, + { + name: "connection SCP to upload file", + options: []NewAgentContainerOption{}, + run: func(t *testing.T, environment *Environment, device *models.Device) { + config := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + ssh.Password(ShellHubAgentPassword), + }, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + } + + var conn *ssh.Client + + require.EventuallyWithT(t, func(tt *assert.CollectT) { + var err error + + conn, err = ssh.Dial("tcp", fmt.Sprintf("localhost:%s", environment.services.Env("SHELLHUB_SSH_PORT")), config) + assert.NoError(tt, err) + }, 30*time.Second, 1*time.Second) + + sess, err := scp.NewClientBySSH(conn) + require.NoError(t, err) + + ctx := context.Background() + + file := bytes.NewBuffer(make([]byte, 1024)) + + err = sess.CopyFilePassThru(ctx, file, "/tmp/sent", "0644", io.LimitReader) + require.NoError(t, err) + + sess.Close() + conn.Close() + }, + }, + { + name: "connection SCP to download file", + options: []NewAgentContainerOption{}, + run: func(t *testing.T, environment *Environment, device *models.Device) { + config := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + ssh.Password(ShellHubAgentPassword), + }, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + } + + var conn *ssh.Client + + require.EventuallyWithT(t, func(tt *assert.CollectT) { + var err error + + conn, err = ssh.Dial("tcp", fmt.Sprintf("localhost:%s", environment.services.Env("SHELLHUB_SSH_PORT")), config) + assert.NoError(tt, err) + }, 30*time.Second, 1*time.Second) + + sess, err := scp.NewClientBySSH(conn) + require.NoError(t, err) + + ctx := context.Background() + + file := bytes.NewBuffer(make([]byte, 1024)) + + err = sess.CopyFromRemotePassThru(ctx, file, "/etc/os-release", nil) + 
require.NoError(t, err) + + sess.Close() + conn.Close() + }, + }, + { + name: "direct tcpip port redirect", + options: []NewAgentContainerOption{}, + run: func(t *testing.T, env *Environment, device *models.Device) { + config := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + ssh.Password(ShellHubAgentPassword), + }, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + } + + conn, err := ssh.Dial("tcp", fmt.Sprintf("localhost:%s", env.services.Env("SHELLHUB_SSH_PORT")), config) + require.NoError(t, err) + + type Data struct { + DestAddr string + DestPort uint32 + OriginAddr string + OriginPort uint32 + } + + port := environment.GetFreePort(t) + + listener, err := net.Listen("tcp", ":"+port) + require.NoError(t, err) + + wg := new(sync.WaitGroup) + + wg.Add(1) + go func() { + defer wg.Done() + + conn, err := listener.Accept() + require.NoError(t, err) + + buffer := make([]byte, 1024) + + read, err := conn.Read(buffer) + require.NoError(t, err) + + require.Equal(t, read, 4) + require.Equal(t, "test", string(buffer[:4])) + + conn.Close() + }() + + dest, err := strconv.Atoi(port) + require.NoError(t, err) + + orig, err := strconv.Atoi(environment.GetFreePort(t)) + require.NoError(t, err) + + data := Data{ + DestAddr: "0.0.0.0", + DestPort: uint32(dest), //nolint:gosec + OriginAddr: "127.0.0.1", + OriginPort: uint32(orig), //nolint:gosec + } + + ch, _, err := conn.OpenChannel("direct-tcpip", ssh.Marshal(data)) + require.NoError(t, err) + + wrote, err := ch.Write([]byte("test")) + require.NoError(t, err) + + require.Equal(t, wrote, 4) + + wg.Wait() + + ch.Close() + conn.Close() + }, + }, + { + name: "session timeout behavior", + run: func(t *testing.T, environment *Environment, device *models.Device) { + config := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + 
ssh.Password(ShellHubAgentPassword), + }, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + } + + conn, err := ssh.Dial("tcp", fmt.Sprintf("localhost:%s", environment.services.Env("SHELLHUB_SSH_PORT")), config) + require.NoError(t, err) + + sess, err := conn.NewSession() + require.NoError(t, err) + + output, err := sess.CombinedOutput("sleep 3 && echo -n 'still alive'") + require.NoError(t, err) + + assert.Equal(t, "still alive", string(output)) + + sess.Close() + conn.Close() + }, + }, + { + name: "connection SFTP to upload large file", + options: []NewAgentContainerOption{}, + run: func(t *testing.T, environment *Environment, device *models.Device) { + config := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + ssh.Password(ShellHubAgentPassword), + }, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + } + + conn, err := ssh.Dial("tcp", fmt.Sprintf("localhost:%s", environment.services.Env("SHELLHUB_SSH_PORT")), config) + require.NoError(t, err) + defer conn.Close() + + client, err := sftp.NewClient(conn) + require.NoError(t, err) + defer client.Close() + + // Create a large file (10MB) + fileSize := 10 * 1024 * 1024 // 10MB + randomData := make([]byte, fileSize) + _, err = rand.Read(randomData) + require.NoError(t, err) + + tempFile, err := os.CreateTemp("", "large-file-test-*.bin") + require.NoError(t, err) + defer os.Remove(tempFile.Name()) + + _, err = tempFile.Write(randomData) + require.NoError(t, err) + tempFile.Close() + + localFile, err := os.Open(tempFile.Name()) + require.NoError(t, err) + defer localFile.Close() + + remoteFile, err := client.Create("/tmp/large-file-test.bin") + require.NoError(t, err) + defer remoteFile.Close() + + written, err := io.Copy(remoteFile, localFile) + require.NoError(t, err) + assert.Equal(t, int64(fileSize), written) + + sess, err := conn.NewSession() + require.NoError(t, err) + defer sess.Close() + + 
output, err := sess.Output("stat -c %s /tmp/large-file-test.bin") + require.NoError(t, err) + + size, err := strconv.ParseInt(strings.TrimSpace(string(output)), 10, 64) + require.NoError(t, err) + assert.Equal(t, int64(fileSize), size) + }, + }, + { + name: "connection EXEC with large output", + run: func(t *testing.T, environment *Environment, device *models.Device) { + config := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + ssh.Password(ShellHubAgentPassword), + }, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + } + + conn, err := ssh.Dial("tcp", fmt.Sprintf("localhost:%s", environment.services.Env("SHELLHUB_SSH_PORT")), config) + require.NoError(t, err) + defer conn.Close() + + sess, err := conn.NewSession() + require.NoError(t, err) + defer sess.Close() + + // Generate large output (around 1MB). + output, err := sess.Output("yes X | tr -d '\n' | head -c 1048576") + require.NoError(t, err) + + assert.Equal(t, 1024*1024, len(output)) + for _, b := range output { + assert.Equal(t, byte('X'), b) + } + }, + }, + { + name: "connection EXEC with environment variables", + run: func(t *testing.T, environment *Environment, device *models.Device) { + config := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + ssh.Password(ShellHubAgentPassword), + }, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + } + + conn, err := ssh.Dial("tcp", fmt.Sprintf("localhost:%s", environment.services.Env("SHELLHUB_SSH_PORT")), config) + require.NoError(t, err) + defer conn.Close() + + sess, err := conn.NewSession() + require.NoError(t, err) + defer sess.Close() + + err = sess.Setenv("TEST_VAR1", "test_value1") + require.NoError(t, err) + err = sess.Setenv("TEST_VAR2", "test_value2") + require.NoError(t, err) + + output, err := sess.Output("echo -n $TEST_VAR1-$TEST_VAR2") + 
require.NoError(t, err) + + assert.Equal(t, "test_value1-test_value2", string(output)) + }, + }, + { + name: "terminal window size change", + run: func(t *testing.T, environment *Environment, device *models.Device) { + config := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + ssh.Password(ShellHubAgentPassword), + }, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + } + + conn, err := ssh.Dial("tcp", fmt.Sprintf("localhost:%s", environment.services.Env("SHELLHUB_SSH_PORT")), config) + require.NoError(t, err) + defer conn.Close() + + sess, err := conn.NewSession() + require.NoError(t, err) + defer sess.Close() + + initialWidth, initialHeight := 80, 24 + err = sess.RequestPty("xterm", initialHeight, initialWidth, ssh.TerminalModes{ + ssh.ECHO: 1, + }) + require.NoError(t, err) + + stdin, _ := sess.StdinPipe() + stdout, _ := sess.StdoutPipe() + + err = sess.Shell() + require.NoError(t, err) + + reader := bufio.NewReader(stdout) + + // NOTE: Disable bracketed paste mode to simplify output parsing. + _, err = fmt.Fprintln(stdin, "bind 'set enable-bracketed-paste off'") + require.NoError(t, err) + + _, err = fmt.Fprintln(stdin, "echo START") + require.NoError(t, err) + + // NOTE: Wait for the shell to be ready. + for { + line, err := reader.ReadString('\n') + require.NoError(t, err) + + if strings.TrimSpace(line) == "START" { + break + } + } + + _, err = fmt.Fprintln(stdin, "stty size") + require.NoError(t, err) + + // NOTE: Read and discard the command echo line. 
+ reader.ReadString('\n') + + initialSizeOutput, err := reader.ReadString('\n') // Read line + require.NoError(t, err) + + assert.Equal(t, fmt.Sprintf("%d %d", initialHeight, initialWidth), strings.TrimSpace(initialSizeOutput)) + + newWidth, newHeight := 120, 40 + err = sess.WindowChange(newHeight, newWidth) + require.NoError(t, err) + + _, err = fmt.Fprintln(stdin, "stty size") + require.NoError(t, err) + + // NOTE: Read and discard the command echo line. + reader.ReadString('\n') + + newSizeOutput, err := reader.ReadString('\n') // Read line + require.NoError(t, err) + + assert.Equal(t, fmt.Sprintf("%d %d", newHeight, newWidth), strings.TrimSpace(newSizeOutput)) + }, + }, + { + name: "connection EXEC with invalid command", + run: func(t *testing.T, environment *Environment, device *models.Device) { + config := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + ssh.Password(ShellHubAgentPassword), + }, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + } + + conn, err := ssh.Dial("tcp", fmt.Sprintf("localhost:%s", environment.services.Env("SHELLHUB_SSH_PORT")), config) + require.NoError(t, err) + defer conn.Close() + + sess, err := conn.NewSession() + require.NoError(t, err) + defer sess.Close() + + _, err = sess.Output("this-command-does-not-exist") + require.Error(t, err) + + var exitErr *ssh.ExitError + require.ErrorAs(t, err, &exitErr) + assert.NotEqual(t, 0, exitErr.ExitStatus()) + }, + }, + { + name: "handling special characters in commands and output", + run: func(t *testing.T, environment *Environment, device *models.Device) { + config := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + ssh.Password(ShellHubAgentPassword), + }, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + } + + conn, err := ssh.Dial("tcp", fmt.Sprintf("localhost:%s", 
environment.services.Env("SHELLHUB_SSH_PORT")), config) + require.NoError(t, err) + defer conn.Close() + + sess, err := conn.NewSession() + require.NoError(t, err) + defer sess.Close() + + specialChars := "!@#$%^&*()_+{}[]|;:,.<>/?`~" + output, err := sess.Output(fmt.Sprintf("echo -n '%s'", specialChars)) + require.NoError(t, err) + + assert.Equal(t, specialChars, string(output)) + + // Test Unicode characters + unicodeChars := "こんにちは世界 ñáéíóú 你好世界" + sess2, err := conn.NewSession() + require.NoError(t, err) + defer sess2.Close() + + output, err = sess2.Output(fmt.Sprintf("echo -n '%s'", unicodeChars)) + require.NoError(t, err) + + assert.Equal(t, unicodeChars, string(output)) + }, + }, + { + name: "connection with cipher and MAC preferences", + run: func(t *testing.T, environment *Environment, device *models.Device) { + config := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + ssh.Password(ShellHubAgentPassword), + }, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + Config: ssh.Config{ + Ciphers: []string{ + "aes256-ctr", "aes192-ctr", "aes128-ctr", + "aes256-gcm@openssh.com", "aes128-gcm@openssh.com", + }, + MACs: []string{ + "hmac-sha2-256-etm@openssh.com", + "hmac-sha2-512-etm@openssh.com", + "hmac-sha2-256", + "hmac-sha2-512", + }, + }, + } + + conn, err := ssh.Dial("tcp", fmt.Sprintf("localhost:%s", environment.services.Env("SHELLHUB_SSH_PORT")), config) + require.NoError(t, err) + defer conn.Close() + + sess, err := conn.NewSession() + require.NoError(t, err) + defer sess.Close() + + output, err := sess.Output("echo -n 'cipher test'") + require.NoError(t, err) + assert.Equal(t, "cipher test", string(output)) + }, + }, + { + name: "multiple concurrent SSH sessions", + run: func(t *testing.T, environment *Environment, device *models.Device) { + config := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, 
device.Name), + Auth: []ssh.AuthMethod{ + ssh.Password(ShellHubAgentPassword), + }, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + } + + const numConnections = 5 + var wg sync.WaitGroup + errors := make(chan error, numConnections) + + for i := range numConnections { + wg.Add(1) + go func(id int) { + defer wg.Done() + + conn, err := ssh.Dial("tcp", fmt.Sprintf("localhost:%s", environment.services.Env("SHELLHUB_SSH_PORT")), config) + if err != nil { + errors <- fmt.Errorf("connection %d failed: %w", id, err) + + return + } + defer conn.Close() + + sess, err := conn.NewSession() + if err != nil { + errors <- fmt.Errorf("session %d failed: %w", id, err) + + return + } + defer sess.Close() + + expected := fmt.Sprintf("session-%d", id) + output, err := sess.Output(fmt.Sprintf("echo -n '%s'", expected)) + if err != nil { + errors <- fmt.Errorf("command %d failed: %w", id, err) + + return + } + + if string(output) != expected { + errors <- fmt.Errorf("unexpected output from session %d: got %q, want %q", id, string(output), expected) + } + }(i) + } + + wg.Wait() + close(errors) + + for err := range errors { + require.NoError(t, err) + } + }, + }, + { + name: "connection with strict host key checking simulation", + run: func(t *testing.T, environment *Environment, device *models.Device) { + var learnedKey ssh.PublicKey + config1 := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + ssh.Password(ShellHubAgentPassword), + }, + HostKeyCallback: func(hostname string, remote net.Addr, key ssh.PublicKey) error { + learnedKey = key + + return nil + }, + } + + conn1, err := ssh.Dial("tcp", fmt.Sprintf("localhost:%s", environment.services.Env("SHELLHUB_SSH_PORT")), config1) + require.NoError(t, err) + conn1.Close() + + config2 := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + 
ssh.Password(ShellHubAgentPassword), + }, + HostKeyCallback: func(hostname string, remote net.Addr, key ssh.PublicKey) error { + if !bytes.Equal(key.Marshal(), learnedKey.Marshal()) { + return fmt.Errorf("host key mismatch") + } + + return nil + }, + } + + conn2, err := ssh.Dial("tcp", fmt.Sprintf("localhost:%s", environment.services.Env("SHELLHUB_SSH_PORT")), config2) + require.NoError(t, err) + defer conn2.Close() + }, + }, + { + name: "connection with keep-alive and heartbeat", + run: func(t *testing.T, environment *Environment, device *models.Device) { + config := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + ssh.Password(ShellHubAgentPassword), + }, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + Timeout: 10 * time.Second, + } + + conn, err := ssh.Dial("tcp", fmt.Sprintf("localhost:%s", environment.services.Env("SHELLHUB_SSH_PORT")), config) + require.NoError(t, err) + defer conn.Close() + + go func() { + ticker := time.NewTicker(2 * time.Second) + defer ticker.Stop() + for range 3 { + <-ticker.C + _, _, err := conn.SendRequest("keepalive@shellhub.io", true, nil) + if err != nil { + t.Logf("Keep-alive failed: %v", err) + + return + } + } + }() + + time.Sleep(8 * time.Second) + + sess, err := conn.NewSession() + require.NoError(t, err) + defer sess.Close() + + output, err := sess.Output("echo -n 'alive after keepalive'") + require.NoError(t, err) + assert.Equal(t, "alive after keepalive", string(output)) + }, + }, + { + name: "connection with subsystem request (sftp)", + run: func(t *testing.T, environment *Environment, device *models.Device) { + config := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + ssh.Password(ShellHubAgentPassword), + }, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + } + + conn, err := ssh.Dial("tcp", 
fmt.Sprintf("localhost:%s", environment.services.Env("SHELLHUB_SSH_PORT")), config) + require.NoError(t, err) + defer conn.Close() + + sess, err := conn.NewSession() + require.NoError(t, err) + defer sess.Close() + + err = sess.RequestSubsystem("sftp") + require.NoError(t, err) + + stdin, err := sess.StdinPipe() + require.NoError(t, err) + + stdout, err := sess.StdoutPipe() + require.NoError(t, err) + + initPacket := []byte{0, 0, 0, 5, 1, 0, 0, 0, 3} // SSH_FXP_INIT with version 3 + _, err = stdin.Write(initPacket) + require.NoError(t, err) + + response := make([]byte, 9) + n, err := stdout.Read(response) + require.NoError(t, err) + assert.Equal(t, 9, n) + assert.Equal(t, byte(2), response[4]) // SSH_FXP_VERSION + }, + }, + { + name: "connection with pseudo-terminal modes", + run: func(t *testing.T, environment *Environment, device *models.Device) { + config := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + ssh.Password(ShellHubAgentPassword), + }, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + } + + conn, err := ssh.Dial("tcp", fmt.Sprintf("localhost:%s", environment.services.Env("SHELLHUB_SSH_PORT")), config) + require.NoError(t, err) + defer conn.Close() + + sess, err := conn.NewSession() + require.NoError(t, err) + defer sess.Close() + + modes := ssh.TerminalModes{ + ssh.ECHO: 0, // Disable echo + ssh.TTY_OP_ISPEED: 14400, // Input speed + ssh.TTY_OP_OSPEED: 14400, // Output speed + ssh.ICRNL: 1, // Map CR to NL on input + ssh.OPOST: 1, // Enable output processing + } + + err = sess.RequestPty("xterm-256color", 24, 80, modes) + require.NoError(t, err) + + stdin, err := sess.StdinPipe() + require.NoError(t, err) + + stdout, err := sess.StdoutPipe() + require.NoError(t, err) + + err = sess.Shell() + require.NoError(t, err) + + _, err = stdin.Write([]byte("stty -echo && echo 'no echo test' && exit\n")) + require.NoError(t, err) + + buffer := make([]byte, 
1024) + n, err := stdout.Read(buffer) + require.NoError(t, err) + assert.Greater(t, n, 0) + }, + }, + { + name: "connection with signal handling", + run: func(t *testing.T, environment *Environment, device *models.Device) { + config := &ssh.ClientConfig{ + User: fmt.Sprintf("%s@%s.%s", ShellHubAgentUsername, ShellHubNamespaceName, device.Name), + Auth: []ssh.AuthMethod{ + ssh.Password(ShellHubAgentPassword), + }, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint:gosec + } + + conn, err := ssh.Dial("tcp", fmt.Sprintf("localhost:%s", environment.services.Env("SHELLHUB_SSH_PORT")), config) + require.NoError(t, err) + defer conn.Close() + + sess, err := conn.NewSession() + require.NoError(t, err) + defer sess.Close() + + err = sess.RequestPty("xterm", 24, 80, ssh.TerminalModes{}) + require.NoError(t, err) + + stdin, err := sess.StdinPipe() + require.NoError(t, err) + + err = sess.Shell() + require.NoError(t, err) + + _, err = stdin.Write([]byte("sleep 30 &\n")) + require.NoError(t, err) + + time.Sleep(100 * time.Millisecond) + + err = sess.Signal(ssh.SIGINT) + if err != nil { + t.Logf("Signal sending not supported: %v", err) + } + + err = sess.Signal(ssh.SIGTERM) + if err != nil { + t.Logf("Signal sending not supported: %v", err) + } + + _, err = stdin.Write([]byte("echo 'signal test done'\n")) + require.NoError(t, err) + }, + }, + } + + ctx := context.Background() + + compose := environment.New(t).Up(ctx) + t.Cleanup(func() { + compose.Down() + }) + + compose.NewUser(t, ShellHubUsername, ShellHubEmail, ShellHubPassword) + compose.NewNamespace(t, ShellHubUsername, ShellHubNamespaceName, ShellHubNamespace) + + auth := models.UserAuthResponse{} + + require.EventuallyWithT(t, func(tt *assert.CollectT) { + resp, err := compose.R(ctx). + SetBody(map[string]string{ + "username": ShellHubUsername, + "password": ShellHubPassword, + }). + SetResult(&auth). 
+ Post("/api/login") + assert.Equal(tt, 200, resp.StatusCode()) + assert.NoError(tt, err) + }, 30*time.Second, 1*time.Second) + + compose.JWT(auth.Token) + + for _, tc := range tests { + test := tc + t.Run(test.name, func(tt *testing.T) { + // Combine connection version with test-specific options + opts := append([]NewAgentContainerOption{ + NewAgentContainerWithConnectionVersion(connectionVersion), + }, test.options...) + + agent, err := NewAgentContainer( + ctx, + compose.Env("SHELLHUB_HTTP_PORT"), + opts..., + ) + require.NoError(tt, err) + + agent.Stop(ctx, nil) + + err = agent.Start(ctx) + require.NoError(tt, err) + + tt.Cleanup(func() { + agent.Stop(context.Background(), nil) + }) + + t.Cleanup(func() { + agent.Terminate(context.Background()) + }) + + devices := []models.Device{} + + require.EventuallyWithT(tt, func(tt *assert.CollectT) { + resp, err := compose.R(ctx).SetResult(&devices). + Get("/api/devices?status=pending") + assert.Equal(tt, 200, resp.StatusCode()) + assert.NoError(tt, err) + + assert.Len(tt, devices, 1) + }, 30*time.Second, 1*time.Second) + + resp, err := compose.R(ctx). + Patch(fmt.Sprintf("/api/devices/%s/accept", devices[0].UID)) + require.Equal(tt, 200, resp.StatusCode()) + require.NoError(tt, err) + + device := models.Device{} + + require.EventuallyWithT(tt, func(tt *assert.CollectT) { + resp, err := compose.R(ctx). + SetResult(&device). 
+ Get(fmt.Sprintf("/api/devices/%s", devices[0].UID)) + assert.Equal(tt, 200, resp.StatusCode()) + assert.NoError(tt, err) + + assert.True(tt, device.Online) + }, 30*time.Second, 1*time.Second) + + // -- + + test.run(tt, &Environment{ + services: compose, + agent: agent, + }, &device) + }) + } +} diff --git a/ui/.env b/ui/.env deleted file mode 100644 index a3b90844fd7..00000000000 --- a/ui/.env +++ /dev/null @@ -1 +0,0 @@ -SHELLHUB_VERSION=0.9.3 \ No newline at end of file diff --git a/ui/.eslintrc.js b/ui/.eslintrc.js deleted file mode 100644 index 504264f1bd0..00000000000 --- a/ui/.eslintrc.js +++ /dev/null @@ -1,94 +0,0 @@ -/* eslint-env node */ -require("@rushstack/eslint-patch/modern-module-resolution"); - -module.exports = { - root: true, - - env: { - node: true, - jest: true, - }, - - extends: [ - "eslint:recommended", - "plugin:vue/vue3-essential", - "@vue/airbnb", - "@vue/eslint-config-typescript/recommended", - ], - - parserOptions: { - ecmaVersion: 2020, - }, - rules: { - quotes: [2, "double", "avoid-escape"], - "no-console": process.env.NODE_ENV === "production" ? "warn" : "off", - "no-debugger": process.env.NODE_ENV === "production" ? 
"warn" : "off", - "global-require": 0, - indent: [2], - "no-cond-assign": ["error"], - "no-constant-condition": ["error"], - "no-empty-pattern": ["error"], - "no-redeclare": ["error"], - "no-delete-var": ["error"], - "no-var": ["error"], - "import/no-unresolved": "off", - "import/no-extraneous-dependencies": ["error", { peerDependencies: true }], - "import/no-cycle": [0, { ignoreExternal: true }], - "import/extensions": "off", - "vue/max-len": ["error", { code: 140, template: 140 }], - "spaced-comment": [ - 2, - "always", - { - exceptions: ["////"], - markers: ["/"], - }, - ], - "import/no-useless-path-segments": [ - 0, - { - noUselessIndex: true, - }, - ], - "vue/multi-word-component-names": [ - 0, - { - ignores: [], - }, - ], - "no-shadow": [0, { hoist: "never" }], - "no-confusing-arrow": [0, { allowParens: true, onlyOneSimpleParam: false }], - "object-curly-newline": [0, "always"], - "no-plusplus": 0, - "@typescript-eslint/ban-types": [ - "error", - { - extendDefaults: true, - types: { - "{}": false, - }, - }, - ], - }, - - overrides: [ - { - files: ["**/__tests__/*.{j,t}s?(x)", "**/tests/unit/**/*.spec.{j,t}s?(x)"], - env: { - jest: true, - }, - }, - { - files: ["**/__tests__/*.{j,t}s?(x)", "**/tests/unit/**/*.spec.{j,t}s?(x)"], - env: { - jest: true, - }, - }, - { - files: ["**/__tests__/*.{j,t}s?(x)", "**/tests/unit/**/*.spec.{j,t}s?(x)"], - env: { - jest: true, - }, - }, - ], -}; diff --git a/ui/Dockerfile b/ui/Dockerfile index f38a9d2a93e..a47008fa46e 100644 --- a/ui/Dockerfile +++ b/ui/Dockerfile @@ -1,8 +1,8 @@ -FROM node:21.4.0-alpine3.17 as base +FROM node:24.13.0-alpine3.22 AS base ARG NPM_CONFIG_REGISTRY -RUN apk add --update build-base python3 +RUN apk add --update build-base python3 curl git WORKDIR /app @@ -10,7 +10,7 @@ COPY ui/package*.json ./ RUN npm install -FROM base as development +FROM base AS development ARG NPM_CONFIG_REGISTRY ENV NPM_CONFIG_REGISTRY ${NPM_CONFIG_REGISTRY} @@ -21,15 +21,9 @@ COPY --from=base /app/node_modules /node_modules 
COPY ui/scripts /scripts -RUN apk add openjdk11-jre - -RUN npm install -g @openapitools/openapi-generator-cli - -RUN openapi-generator-cli version-manager set 6.0.0 - CMD ["/scripts/entrypoint-dev.sh"] -FROM base as builder +FROM base AS builder ARG NPM_CONFIG_REGISTRY @@ -41,7 +35,9 @@ COPY --from=base /app/node_modules ./node_modules RUN npm run build -FROM nginx:1.25.3-alpine as production +FROM nginx:1.29.4-alpine AS production + +RUN apk add curl RUN rm /etc/nginx/conf.d/default.conf COPY ui/nginx.conf /etc/nginx/conf.d diff --git a/ui/admin/index.html b/ui/admin/index.html new file mode 100644 index 00000000000..d4de6215c40 --- /dev/null +++ b/ui/admin/index.html @@ -0,0 +1,22 @@ + + + + + + + + + + + Admin - ShellHub + + + +
+ + + + diff --git a/ui/admin/src/App.vue b/ui/admin/src/App.vue new file mode 100644 index 00000000000..6b943e712c6 --- /dev/null +++ b/ui/admin/src/App.vue @@ -0,0 +1,17 @@ + + + diff --git a/ui/admin/src/assets/logo-inverted.svg b/ui/admin/src/assets/logo-inverted.svg new file mode 100644 index 00000000000..0570129f504 --- /dev/null +++ b/ui/admin/src/assets/logo-inverted.svg @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + diff --git a/ui/admin/src/components/Announcement/AnnouncementDelete.vue b/ui/admin/src/components/Announcement/AnnouncementDelete.vue new file mode 100644 index 00000000000..36b02485346 --- /dev/null +++ b/ui/admin/src/components/Announcement/AnnouncementDelete.vue @@ -0,0 +1,65 @@ + + + diff --git a/ui/admin/src/components/Announcement/AnnouncementEdit.vue b/ui/admin/src/components/Announcement/AnnouncementEdit.vue new file mode 100644 index 00000000000..9f41cd594a2 --- /dev/null +++ b/ui/admin/src/components/Announcement/AnnouncementEdit.vue @@ -0,0 +1,149 @@ + + + + + diff --git a/ui/admin/src/components/Announcement/AnnouncementList.vue b/ui/admin/src/components/Announcement/AnnouncementList.vue new file mode 100644 index 00000000000..e8ced43b989 --- /dev/null +++ b/ui/admin/src/components/Announcement/AnnouncementList.vue @@ -0,0 +1,149 @@ + + + diff --git a/ui/admin/src/components/Device/DeviceList.vue b/ui/admin/src/components/Device/DeviceList.vue new file mode 100644 index 00000000000..dab131182a1 --- /dev/null +++ b/ui/admin/src/components/Device/DeviceList.vue @@ -0,0 +1,228 @@ + + + + + diff --git a/ui/admin/src/components/FirewallRules/FirewallRulesList.vue b/ui/admin/src/components/FirewallRules/FirewallRulesList.vue new file mode 100644 index 00000000000..ae03aafa7db --- /dev/null +++ b/ui/admin/src/components/FirewallRules/FirewallRulesList.vue @@ -0,0 +1,169 @@ + + + diff --git a/ui/admin/src/components/Instance/SSO/ConfigureSSO.vue b/ui/admin/src/components/Instance/SSO/ConfigureSSO.vue new file mode 100644 index 
00000000000..52cc0afaa12 --- /dev/null +++ b/ui/admin/src/components/Instance/SSO/ConfigureSSO.vue @@ -0,0 +1,416 @@ + + + diff --git a/ui/admin/src/components/Namespace/NamespaceDelete.vue b/ui/admin/src/components/Namespace/NamespaceDelete.vue new file mode 100644 index 00000000000..26d87f56983 --- /dev/null +++ b/ui/admin/src/components/Namespace/NamespaceDelete.vue @@ -0,0 +1,63 @@ + + + diff --git a/ui/admin/src/components/Namespace/NamespaceEdit.vue b/ui/admin/src/components/Namespace/NamespaceEdit.vue new file mode 100644 index 00000000000..5c01068876b --- /dev/null +++ b/ui/admin/src/components/Namespace/NamespaceEdit.vue @@ -0,0 +1,126 @@ + + + diff --git a/ui/admin/src/components/Namespace/NamespaceExport.vue b/ui/admin/src/components/Namespace/NamespaceExport.vue new file mode 100644 index 00000000000..af7bde56824 --- /dev/null +++ b/ui/admin/src/components/Namespace/NamespaceExport.vue @@ -0,0 +1,130 @@ + + + diff --git a/ui/admin/src/components/Namespace/NamespaceList.vue b/ui/admin/src/components/Namespace/NamespaceList.vue new file mode 100644 index 00000000000..005efdc00ad --- /dev/null +++ b/ui/admin/src/components/Namespace/NamespaceList.vue @@ -0,0 +1,237 @@ + + + + + diff --git a/ui/admin/src/components/Sessions/SessionList.vue b/ui/admin/src/components/Sessions/SessionList.vue new file mode 100644 index 00000000000..52a0ea488ea --- /dev/null +++ b/ui/admin/src/components/Sessions/SessionList.vue @@ -0,0 +1,235 @@ + + + + + diff --git a/ui/admin/src/components/Settings/SettingsAuthentication.vue b/ui/admin/src/components/Settings/SettingsAuthentication.vue new file mode 100644 index 00000000000..e0824d57f8c --- /dev/null +++ b/ui/admin/src/components/Settings/SettingsAuthentication.vue @@ -0,0 +1,242 @@ + + + + + diff --git a/ui/admin/src/components/Settings/SettingsLicense.vue b/ui/admin/src/components/Settings/SettingsLicense.vue new file mode 100644 index 00000000000..d51d2c336f6 --- /dev/null +++ 
b/ui/admin/src/components/Settings/SettingsLicense.vue @@ -0,0 +1,260 @@ + + + + + diff --git a/ui/admin/src/components/User/UserDelete.vue b/ui/admin/src/components/User/UserDelete.vue new file mode 100644 index 00000000000..1331add5477 --- /dev/null +++ b/ui/admin/src/components/User/UserDelete.vue @@ -0,0 +1,67 @@ + + + diff --git a/ui/admin/src/components/User/UserExport.vue b/ui/admin/src/components/User/UserExport.vue new file mode 100644 index 00000000000..7d209a26d77 --- /dev/null +++ b/ui/admin/src/components/User/UserExport.vue @@ -0,0 +1,123 @@ + + + diff --git a/ui/admin/src/components/User/UserFormDialog.vue b/ui/admin/src/components/User/UserFormDialog.vue new file mode 100644 index 00000000000..aa21d9853ce --- /dev/null +++ b/ui/admin/src/components/User/UserFormDialog.vue @@ -0,0 +1,338 @@ + + + diff --git a/ui/admin/src/components/User/UserList.vue b/ui/admin/src/components/User/UserList.vue new file mode 100644 index 00000000000..c930fe28b3b --- /dev/null +++ b/ui/admin/src/components/User/UserList.vue @@ -0,0 +1,196 @@ + + + diff --git a/ui/admin/src/components/User/UserResetPassword.vue b/ui/admin/src/components/User/UserResetPassword.vue new file mode 100644 index 00000000000..157ce282cec --- /dev/null +++ b/ui/admin/src/components/User/UserResetPassword.vue @@ -0,0 +1,125 @@ + + + diff --git a/ui/admin/src/components/User/UserStatusChip.vue b/ui/admin/src/components/User/UserStatusChip.vue new file mode 100644 index 00000000000..6491f81714c --- /dev/null +++ b/ui/admin/src/components/User/UserStatusChip.vue @@ -0,0 +1,29 @@ + + + diff --git a/ui/admin/src/env.d.ts b/ui/admin/src/env.d.ts new file mode 100644 index 00000000000..3c8a7ab579a --- /dev/null +++ b/ui/admin/src/env.d.ts @@ -0,0 +1,7 @@ +/// + +declare module "*.vue" { + import type { DefineComponent } from "vue"; + const component: DefineComponent; + export default component; +} diff --git a/ui/admin/src/interfaces/IAnnouncement.ts b/ui/admin/src/interfaces/IAnnouncement.ts new file 
mode 100644 index 00000000000..cfb1dc94ed3 --- /dev/null +++ b/ui/admin/src/interfaces/IAnnouncement.ts @@ -0,0 +1,7 @@ +import { IAnnouncement, IAnnouncementShort } from "@/interfaces/IAnnouncement"; + +export type IAdminAnnouncementShort = IAnnouncementShort; + +export type IAdminAnnouncement = IAnnouncement; + +export type IAdminAnnouncementRequestBody = Pick; diff --git a/ui/admin/src/interfaces/IDevice.ts b/ui/admin/src/interfaces/IDevice.ts new file mode 100644 index 00000000000..10334207f42 --- /dev/null +++ b/ui/admin/src/interfaces/IDevice.ts @@ -0,0 +1,3 @@ +import { IDevice } from "@/interfaces/IDevice"; + +export type IAdminDevice = IDevice; diff --git a/ui/admin/src/interfaces/IFilter.ts b/ui/admin/src/interfaces/IFilter.ts new file mode 100644 index 00000000000..4d63e68f86e --- /dev/null +++ b/ui/admin/src/interfaces/IFilter.ts @@ -0,0 +1,20 @@ +import { HostnameFilter, TagsFilter } from "@/interfaces/IFilter"; + +export enum AdminNamespaceFilterOptions { + MoreThan = "moreThan", + NoDevices = "noDevices", + NoSessions = "noSessions", +} + +export interface IAdminExportFilter { + type: "property" | "operator"; + params: { + name?: string; + operator?: string; + value?: number; + }; +} + +export type AdminHostnameFilter = HostnameFilter; +export type AdminTagsFilter = TagsFilter; +export type AdminFilter = AdminHostnameFilter | AdminTagsFilter; diff --git a/ui/admin/src/interfaces/IFirewallRule.ts b/ui/admin/src/interfaces/IFirewallRule.ts new file mode 100644 index 00000000000..c53dd40e995 --- /dev/null +++ b/ui/admin/src/interfaces/IFirewallRule.ts @@ -0,0 +1,3 @@ +import { IFirewallRule } from "@/interfaces/IFirewallRule"; + +export type IAdminFirewallRule = Omit; diff --git a/ui/admin/src/interfaces/IInstance.ts b/ui/admin/src/interfaces/IInstance.ts new file mode 100644 index 00000000000..38bcdb3e392 --- /dev/null +++ b/ui/admin/src/interfaces/IInstance.ts @@ -0,0 +1,52 @@ +// Binding needs to receive either post or redirect URL, or both. 
+type SAMLBinding = ({ + post: string, +} | { + redirect: string, +} | { + post: string, + redirect: string, +}) & { + preferred?: "post" | "redirect", +}; + +export interface IAdminUpdateSAML { + enable: boolean; + idp: { + metadata_url?: string, + entity_id?: string, + binding?: SAMLBinding, + certificate?: string, + mappings?: { + email: string, + name: string, + }, + }; + sp: { + sign_requests?: boolean + } +} + +export interface IAdminSAML { + enabled: boolean, + idp: { + entity_id: string, + binding: SAMLBinding, + certificates: Array, + mappings?: { + email: string, + name: string, + } + }, + sp: { + certificate: string + sign_auth_requests: boolean + }, + auth_url: string, + assertion_url: string, +} + +export interface IAdminAuth { + local: { enabled: boolean }; + saml: IAdminSAML; +} diff --git a/ui/admin/src/interfaces/ILicense.ts b/ui/admin/src/interfaces/ILicense.ts new file mode 100644 index 00000000000..18857c0b2ff --- /dev/null +++ b/ui/admin/src/interfaces/ILicense.ts @@ -0,0 +1,279 @@ +enum AllowedRegions { + Ad = "AD", + Ae = "AE", + Af = "AF", + Ag = "AG", + Ai = "AI", + Al = "AL", + Am = "AM", + Ao = "AO", + Aq = "AQ", + Ar = "AR", + As = "AS", + At = "AT", + Au = "AU", + Aw = "AW", + Ax = "AX", + Az = "AZ", + Ba = "BA", + Bb = "BB", + Bd = "BD", + Be = "BE", + Bf = "BF", + Bg = "BG", + Bh = "BH", + Bi = "BI", + Bj = "BJ", + Bl = "BL", + Bm = "BM", + Bn = "BN", + Bo = "BO", + Bq = "BQ", + Br = "BR", + Bs = "BS", + Bt = "BT", + Bv = "BV", + Bw = "BW", + By = "BY", + Bz = "BZ", + Ca = "CA", + Cc = "CC", + Cd = "CD", + Cf = "CF", + Cg = "CG", + Ch = "CH", + Ci = "CI", + Ck = "CK", + Cl = "CL", + Cm = "CM", + Cn = "CN", + Co = "CO", + Cr = "CR", + Cu = "CU", + Cv = "CV", + Cw = "CW", + Cx = "CX", + Cy = "CY", + Cz = "CZ", + De = "DE", + Dj = "DJ", + Dk = "DK", + Dm = "DM", + Do = "DO", + Dz = "DZ", + Ec = "EC", + Ee = "EE", + Eg = "EG", + Eh = "EH", + Er = "ER", + Es = "ES", + Et = "ET", + Fi = "FI", + Fj = "FJ", + Fk = "FK", + Fm = "FM", + Fo = 
"FO", + Fr = "FR", + Ga = "GA", + Gb = "GB", + Gd = "GD", + Ge = "GE", + Gf = "GF", + Gg = "GG", + Gh = "GH", + Gi = "GI", + Gl = "GL", + Gm = "GM", + Gn = "GN", + Gp = "GP", + Gq = "GQ", + Gr = "GR", + Gs = "GS", + Gt = "GT", + Gu = "GU", + Gw = "GW", + Gy = "GY", + Hk = "HK", + Hm = "HM", + Hn = "HN", + Hr = "HR", + Ht = "HT", + Hu = "HU", + Id = "ID", + Ie = "IE", + Il = "IL", + Im = "IM", + In = "IN", + Io = "IO", + Iq = "IQ", + Ir = "IR", + Is = "IS", + It = "IT", + Je = "JE", + Jm = "JM", + Jo = "JO", + Jp = "JP", + Ke = "KE", + Kg = "KG", + Kh = "KH", + Ki = "KI", + Km = "KM", + Kn = "KN", + Kp = "KP", + Kr = "KR", + Kw = "KW", + Ky = "KY", + Kz = "KZ", + La = "LA", + Lb = "LB", + Lc = "LC", + Li = "LI", + Lk = "LK", + Lr = "LR", + Ls = "LS", + Lt = "LT", + Lu = "LU", + Lv = "LV", + Ly = "LY", + Ma = "MA", + Mc = "MC", + Md = "MD", + Me = "ME", + Mf = "MF", + Mg = "MG", + Mh = "MH", + Mk = "MK", + Ml = "ML", + Mm = "MM", + Mn = "MN", + Mo = "MO", + Mp = "MP", + Mq = "MQ", + Mr = "MR", + Ms = "MS", + Mt = "MT", + Mu = "MU", + Mv = "MV", + Mw = "MW", + Mx = "MX", + My = "MY", + Mz = "MZ", + Na = "NA", + Nc = "NC", + Ne = "NE", + Nf = "NF", + Ng = "NG", + Ni = "NI", + Nl = "NL", + No = "NO", + Np = "NP", + Nr = "NR", + Nu = "NU", + Nz = "NZ", + Om = "OM", + Pa = "PA", + Pe = "PE", + Pf = "PF", + Pg = "PG", + Ph = "PH", + Pk = "PK", + Pl = "PL", + Pm = "PM", + Pn = "PN", + Pr = "PR", + Ps = "PS", + Pt = "PT", + Pw = "PW", + Py = "PY", + Qa = "QA", + Re = "RE", + Ro = "RO", + Rs = "RS", + Ru = "RU", + Rw = "RW", + Sa = "SA", + Sb = "SB", + Sc = "SC", + Sd = "SD", + Se = "SE", + Sg = "SG", + Sh = "SH", + Si = "SI", + Sj = "SJ", + Sk = "SK", + Sl = "SL", + Sm = "SM", + Sn = "SN", + So = "SO", + Sr = "SR", + Ss = "SS", + St = "ST", + Sv = "SV", + Sx = "SX", + Sy = "SY", + Sz = "SZ", + Tc = "TC", + Td = "TD", + Tf = "TF", + Tg = "TG", + Th = "TH", + Tj = "TJ", + Tk = "TK", + Tm = "TM", + Tn = "TN", + To = "TO", + Tr = "TR", + Tt = "TT", + Tv = "TV", + Tw = "TW", + Tz 
= "TZ", + Ua = "UA", + Ug = "UG", + Um = "UM", + Us = "US", + Uy = "UY", + Uz = "UZ", + Va = "VA", + Vc = "VC", + Ve = "VE", + Vg = "VG", + Vi = "VI", + Vn = "VN", + Vu = "VU", + Wf = "WF", + Ws = "WS", + Ye = "YE", + Yt = "YT", + Za = "ZA", + Zm = "ZM", + Zw = "ZW", +} + +type Customer = { + id?: string; + name?: string; + email?: string; + company?: string; +}; + +export type AdminLicenseFeatures = { + devices: number; + session_recording: boolean; + firewall_rules: boolean; + reports?: boolean; + login_link?: boolean; + billing: boolean; +}; + +export interface IAdminLicense { + id: string; + expired: boolean; + about_to_expire: boolean; + grace_period: boolean; + issued_at: number; + starts_at: number; + expires_at: number; + allowed_regions: Array; + customer: Customer; + features: AdminLicenseFeatures; +} diff --git a/ui/admin/src/interfaces/INamespace.ts b/ui/admin/src/interfaces/INamespace.ts new file mode 100644 index 00000000000..2ca6ac0c3e8 --- /dev/null +++ b/ui/admin/src/interfaces/INamespace.ts @@ -0,0 +1,17 @@ +import { INamespace } from "@/interfaces/INamespace"; + +interface IAdminBilling { + active: boolean; + current_period_end: string; + customer_id: string; + payment_failed: null | string | boolean; + payment_method_id: string; + price_id: string; + state: string; + sub_item_id: string; + subscription_id: string; +} + +export interface IAdminNamespace extends Omit { + billing?: IAdminBilling; +} diff --git a/ui/admin/src/interfaces/ISession.ts b/ui/admin/src/interfaces/ISession.ts new file mode 100644 index 00000000000..5c48d9274f0 --- /dev/null +++ b/ui/admin/src/interfaces/ISession.ts @@ -0,0 +1,3 @@ +import { ISession } from "@/interfaces/ISession"; + +export type IAdminSession = ISession; diff --git a/ui/admin/src/interfaces/IStats.ts b/ui/admin/src/interfaces/IStats.ts new file mode 100644 index 00000000000..01c804fb806 --- /dev/null +++ b/ui/admin/src/interfaces/IStats.ts @@ -0,0 +1,5 @@ +import { IStats } from "@/interfaces/IStats"; + 
+export interface IAdminStats extends IStats { + registered_users: number; +} diff --git a/ui/admin/src/interfaces/IUser.ts b/ui/admin/src/interfaces/IUser.ts new file mode 100644 index 00000000000..27292c3d0d2 --- /dev/null +++ b/ui/admin/src/interfaces/IUser.ts @@ -0,0 +1,33 @@ +export type UserStatus = "confirmed" | "invited" | "not-confirmed"; + +export type UserAuthMethods = Array<"saml" | "local">; + +export interface IAdminUser { + id: string; + status: UserStatus; + max_namespaces: number; + created_at: string; + last_login: string; + name: string; + username: string; + email: string; + recovery_email?: string | null; + mfa?: { enabled?: boolean } | null; + namespacesOwned: number; + preferences?: { + auth_methods: UserAuthMethods; + }; + email_marketing?: boolean | null; + admin?: boolean; +} + +export interface IAdminUserFormData { + name: string; + email: string; + username: string; + password: string; + max_namespaces?: number; + status: UserStatus; + id?: string; + admin?: boolean; +} diff --git a/ui/admin/src/layouts/AppLayout.vue b/ui/admin/src/layouts/AppLayout.vue new file mode 100644 index 00000000000..20d4e3f0ff2 --- /dev/null +++ b/ui/admin/src/layouts/AppLayout.vue @@ -0,0 +1,362 @@ + + + + + diff --git a/ui/admin/src/main.ts b/ui/admin/src/main.ts new file mode 100644 index 00000000000..2dd9ade451c --- /dev/null +++ b/ui/admin/src/main.ts @@ -0,0 +1,17 @@ +import { createApp, type Plugin } from "vue"; +import { createPinia } from "pinia"; +import vuetify from "@/plugins/vuetify"; +import App from "./App.vue"; +import loadFonts from "@/plugins/webfontloader"; +import router from "./router"; +import { SnackbarPlugin } from "@/plugins/snackbar"; + +const pinia = createPinia(); +const app = createApp(App); + +loadFonts(); +app.use(vuetify as Plugin); +app.use(router); +app.use(pinia); +app.use(SnackbarPlugin); +app.mount("#app"); diff --git a/ui/admin/src/router/index.ts b/ui/admin/src/router/index.ts new file mode 100644 index 
00000000000..f127a304275 --- /dev/null +++ b/ui/admin/src/router/index.ts @@ -0,0 +1,165 @@ +import { createRouter, createWebHistory, NavigationGuardNext, RouteLocationNormalized } from "vue-router"; +import Dashboard from "@admin/views/Dashboard.vue"; +import Users from "@admin/views/Users.vue"; +import SettingsLicense from "@admin/components/Settings/SettingsLicense.vue"; +import SettingsAuthentication from "@admin/components/Settings/SettingsAuthentication.vue"; +import Namespaces from "@admin/views/Namespaces.vue"; +import Settings from "@admin/views/Settings.vue"; +import Unauthorized from "@admin/views/Unauthorized.vue"; +import useLicenseStore from "@admin/store/modules/license"; +import useAuthStore from "@admin/store/modules/auth"; +import { plugin as snackbar } from "@/plugins/snackbar"; // using direct plugin because inject() doesn't work outside components + +export const routes = [ + { + path: "/unauthorized", + name: "Unauthorized", + component: Unauthorized, + meta: { + requiresAuth: true, + requiresAdmin: false, + }, + }, + { + path: "/", + name: "dashboard", + component: Dashboard, + }, + { + path: "/users", + name: "users", + component: Users, + }, + { + path: "/user/:id", + name: "userDetails", + component: () => import("@admin/views/UserDetails.vue"), + }, + { + path: "/devices", + name: "devices", + component: () => import("@admin/views/Device.vue"), + redirect: { + name: "listDevices", + }, + children: [ + { + path: "", + name: "listDevices", + component: () => import("@admin/views/Device.vue"), + }, + ], + }, + { + path: "/device/:id", + name: "deviceDetails", + component: () => import("@admin/views/DeviceDetails.vue"), + }, + { + path: "/sessions", + name: "sessions", + component: () => import("@admin/views/Sessions.vue"), + }, + { + path: "/session/:id", + name: "sessionDetails", + component: () => import("@admin/views/SessionDetails.vue"), + }, + { + path: "/settings", + name: "Settings", + component: Settings, + redirect: { name: 
"SettingProfile" }, + children: [ + { + path: "authentication", + name: "SettingAuthentication", + component: SettingsAuthentication, + }, + { + path: "license", + name: "SettingLicense", + component: SettingsLicense, + }, + ], + }, + { + path: "/firewall-rules", + name: "firewall-rules", + component: () => import("@admin/views/FirewallRules.vue"), + }, + { + path: "/firewall-rules/:id", + name: "firewallRulesDetails", + component: () => import("@admin/views/FirewallRulesDetails.vue"), + }, + { + path: "/namespaces", + name: "namespaces", + component: Namespaces, + }, + { + path: "/namespace/:id", + name: "namespaceDetails", + component: () => import("@admin/views/NamespaceDetails.vue"), + }, + { + path: "/announcements", + name: "announcements", + component: () => import("@admin/views/Announcements.vue"), + }, + { + path: "/announcement/:uuid", + name: "announcementDetails", + component: () => import("@admin/views/AnnouncementDetails.vue"), + }, + { + path: "/new-announcement", + name: "new-announcement", + component: () => import("@admin/views/NewAnnouncement.vue"), + }, +]; + +const router = createRouter({ + history: createWebHistory("/admin/"), + routes, +}); + +router.beforeEach( + async (to: RouteLocationNormalized, from: RouteLocationNormalized, next: NavigationGuardNext) => { + const licenseStore = useLicenseStore(); + const authStore = useAuthStore(); + + const requiresAuth = to.meta.requiresAuth ?? true; + const requiresAdmin = to.meta.requiresAdmin ?? true; + + if (!authStore.isLoggedIn && requiresAuth) { + window.location.href = `/login?redirect=${encodeURIComponent(to.fullPath)}`; + return next(); + } + + if (authStore.isLoggedIn && requiresAdmin) { + if (!authStore.isAdmin && to.name !== "Unauthorized") return next({ name: "Unauthorized" }); + + const { license, getLicense } = licenseStore; + + try { + await getLicense(); + + if (license.expired && to.name !== "SettingLicense") { + snackbar.showError("Your license has expired. 
Please update it and try again."); + return next({ name: "SettingLicense" }); + } + } catch { + if (to.name !== "SettingLicense") { + snackbar.showError("Failed to get your license info. Please check it and try again."); + return next({ name: "SettingLicense" }); + } + } + } + + return next(); + }, +); + +export default router; diff --git a/ui/admin/src/store/api/announcement.ts b/ui/admin/src/store/api/announcement.ts new file mode 100644 index 00000000000..b7700f73273 --- /dev/null +++ b/ui/admin/src/store/api/announcement.ts @@ -0,0 +1,21 @@ +import { IAdminAnnouncementRequestBody } from "@admin/interfaces/IAnnouncement"; +import { adminApi } from "@/api/http"; + +export const createAnnouncement = async ( + announcement: IAdminAnnouncementRequestBody, +) => adminApi.createAnnouncement(announcement); + +export const updateAnnouncement = async ( + uuid: string, + announcement: IAdminAnnouncementRequestBody, +) => adminApi.updateAnnouncement(uuid, announcement); + +export const deleteAnnouncement = async (uuid: string) => adminApi.deleteAnnouncement(uuid); + +export const fetchAnnouncementList = async ( + page: number, + perPage: number, + orderBy: "asc" | "desc", +) => adminApi.listAnnouncementsAdmin(page, perPage, orderBy); + +export const getAnnouncement = async (uuid: string) => adminApi.getAnnouncementAdmin(uuid); diff --git a/ui/admin/src/store/api/auth.ts b/ui/admin/src/store/api/auth.ts new file mode 100644 index 00000000000..f2e28135e65 --- /dev/null +++ b/ui/admin/src/store/api/auth.ts @@ -0,0 +1,11 @@ +import { adminApi } from "@/api/http"; + +type UserLogin = { + username: string; + password: string; +}; + +const login = async (user: UserLogin) => adminApi.loginAdmin(user); +const getToken = async (tenant: string) => adminApi.getUserTokenAdmin(tenant); + +export { login, getToken }; diff --git a/ui/admin/src/store/api/devices.ts b/ui/admin/src/store/api/devices.ts new file mode 100644 index 00000000000..e9b941e1274 --- /dev/null +++ 
b/ui/admin/src/store/api/devices.ts @@ -0,0 +1,18 @@ +import { adminApi } from "@/api/http"; + +export const getDevices = ( + page: number, + perPage: number, + filter?: string, + sortField?: string, + sortOrder?: "asc" | "desc", +) => adminApi.getDevicesAdmin( + filter, + page, + perPage, + undefined, // status + sortField, + sortOrder, +); + +export const getDevice = (uid: string) => adminApi.getDeviceAdmin(uid); diff --git a/ui/admin/src/store/api/firewall_rules.ts b/ui/admin/src/store/api/firewall_rules.ts new file mode 100644 index 00000000000..a6c0d8b226a --- /dev/null +++ b/ui/admin/src/store/api/firewall_rules.ts @@ -0,0 +1,5 @@ +import { adminApi } from "@/api/http"; + +export const fetchFirewalls = async (page: number, perPage: number) => adminApi.getFirewallRulesAdmin(page, perPage); + +export const getFirewall = async (id: string) => adminApi.getFirewallRuleAdmin(id); diff --git a/ui/admin/src/store/api/instance.ts b/ui/admin/src/store/api/instance.ts new file mode 100644 index 00000000000..bf3c22ebc0e --- /dev/null +++ b/ui/admin/src/store/api/instance.ts @@ -0,0 +1,14 @@ +import { IAdminUpdateSAML } from "@admin/interfaces/IInstance"; +import { adminApi } from "@/api/http"; + +const getAuthenticationSettings = async () => adminApi.getAuthenticationSettings(); + +const configureLocalAuthentication = async ( + status: boolean, +) => adminApi.configureLocalAuthentication({ enable: status }); + +const configureSAMLAuthentication = async ( + data: IAdminUpdateSAML, +) => adminApi.configureSAMLAuthentication(data); + +export { getAuthenticationSettings, configureLocalAuthentication, configureSAMLAuthentication }; diff --git a/ui/admin/src/store/api/license.ts b/ui/admin/src/store/api/license.ts new file mode 100644 index 00000000000..2f7cd876222 --- /dev/null +++ b/ui/admin/src/store/api/license.ts @@ -0,0 +1,26 @@ +import axios from "axios"; +import { adminApi } from "@/api/http"; + +const getLicense = async () => adminApi.getLicense(); + +const 
uploadLicense = async (file: File) => { + const formData = new FormData(); + formData.append("file", file); + + const token = localStorage.getItem("token"); + + const response = await axios.post( + `${window.location.origin}/admin/api/license`, + formData, + { + headers: { + Authorization: `Bearer ${token}`, + "Content-Type": "multipart/form-data", + }, + }, + ); + + return response; +}; + +export { getLicense, uploadLicense }; diff --git a/ui/admin/src/store/api/namespaces.ts b/ui/admin/src/store/api/namespaces.ts new file mode 100644 index 00000000000..5b0e5ede76a --- /dev/null +++ b/ui/admin/src/store/api/namespaces.ts @@ -0,0 +1,20 @@ +import { IAdminNamespace } from "@admin/interfaces/INamespace"; +import { adminApi } from "@/api/http"; + +export const fetchNamespaces = async ( + page: number, + perPage: number, + filter?: string, +) => adminApi.getNamespacesAdmin(filter, page, perPage); + +export const exportNamespaces = async (filter: string) => adminApi.exportNamespaces(filter); + +export const getNamespace = async (id: string) => adminApi.getNamespaceAdmin(id); + +export const deleteNamespace = async (tenant: string) => adminApi.deleteNamespaceAdmin(tenant); + +export const updateNamespace = async ( + data: IAdminNamespace, +) => adminApi.editNamespaceAdmin(data.tenant_id, { + ...data, +}); diff --git a/ui/admin/src/store/api/sessions.ts b/ui/admin/src/store/api/sessions.ts new file mode 100644 index 00000000000..3c4b4876cf1 --- /dev/null +++ b/ui/admin/src/store/api/sessions.ts @@ -0,0 +1,7 @@ +import { adminApi } from "@/api/http"; + +const fetchSessions = async (perPage: number, page: number) => adminApi.getSessionsAdmin(page, perPage); + +const getSession = async (uid: string) => adminApi.getSessionAdmin(uid); + +export { fetchSessions, getSession }; diff --git a/ui/admin/src/store/api/stats.ts b/ui/admin/src/store/api/stats.ts new file mode 100644 index 00000000000..5ff99656af8 --- /dev/null +++ b/ui/admin/src/store/api/stats.ts @@ -0,0 +1,5 @@ 
+import { adminApi } from "@/api/http"; + +const getAdminStats = async () => adminApi.getStats(); + +export default getAdminStats; diff --git a/ui/admin/src/store/api/users.ts b/ui/admin/src/store/api/users.ts new file mode 100644 index 00000000000..d3dd4fb14f3 --- /dev/null +++ b/ui/admin/src/store/api/users.ts @@ -0,0 +1,36 @@ +import { IAdminUserFormData } from "@admin/interfaces/IUser"; +import { UserAdminRequest } from "@/api/client"; +import { adminApi } from "@/api/http"; + +export const fetchUsers = async ( + page: number, + perPage: number, + search?: string, +) => adminApi.getUsers(search, page, perPage); + +export const getUser = (id: string) => adminApi.getUser(id); + +export const exportUsers = async (filter: string) => adminApi.exportUsers(filter); + +export const addUser = (userData: IAdminUserFormData) => adminApi.createUserAdmin({ + name: userData.name, + email: userData.email, + username: userData.username, + password: userData.password, + max_namespaces: userData.max_namespaces, + admin: userData.admin, +}); + +export const updateUser = async (id: string, userData: IAdminUserFormData) => adminApi.adminUpdateUser(id, { + name: userData.name, + email: userData.email, + username: userData.username, + password: userData.password, + status: userData.status, + max_namespaces: userData.max_namespaces, + admin: userData.admin, +} as UserAdminRequest); + +export const resetUserPassword = async (id: string) => adminApi.adminResetUserPassword(id); + +export const deleteUser = (id: string) => adminApi.adminDeleteUser(id); diff --git a/ui/admin/src/store/modules/announcement.ts b/ui/admin/src/store/modules/announcement.ts new file mode 100644 index 00000000000..5043313f63e --- /dev/null +++ b/ui/admin/src/store/modules/announcement.ts @@ -0,0 +1,48 @@ +import { defineStore } from "pinia"; +import { ref } from "vue"; +import { IAdminAnnouncement, IAdminAnnouncementRequestBody, IAdminAnnouncementShort } from "@admin/interfaces/IAnnouncement"; +import * as 
announcementApi from "../api/announcement"; + +const useAnnouncementStore = defineStore("adminAnnouncement", () => { + const announcements = ref>([]); + const announcement = ref({} as IAdminAnnouncement); + const announcementCount = ref(0); + + const createAnnouncement = async (announcementData: IAdminAnnouncementRequestBody) => { + const { data } = await announcementApi.createAnnouncement(announcementData); + announcement.value = data as IAdminAnnouncement; + }; + + const updateAnnouncement = async (uuid: string, announcementData: IAdminAnnouncementRequestBody) => { + await announcementApi.updateAnnouncement(uuid, announcementData); + }; + + const fetchAnnouncement = async (uuid: string) => { + const { data } = await announcementApi.getAnnouncement(uuid); + announcement.value = data as IAdminAnnouncement; + }; + + const fetchAnnouncementList = async (data: { page: number; perPage: number; orderBy: "asc" | "desc"; }) => { + const res = await announcementApi.fetchAnnouncementList(data.page, data.perPage, data.orderBy); + announcements.value = res.data as IAdminAnnouncementShort[] ?? 
[]; + announcementCount.value = parseInt(res.headers["x-total-count"] as string, 10); + }; + + const deleteAnnouncement = async (uuid: string) => { + const { data } = await announcementApi.deleteAnnouncement(uuid); + announcement.value = data as IAdminAnnouncement; + }; + + return { + announcements, + announcement, + announcementCount, + createAnnouncement, + updateAnnouncement, + fetchAnnouncement, + fetchAnnouncementList, + deleteAnnouncement, + }; +}); + +export default useAnnouncementStore; diff --git a/ui/admin/src/store/modules/auth.ts b/ui/admin/src/store/modules/auth.ts new file mode 100644 index 00000000000..df0a4c9803e --- /dev/null +++ b/ui/admin/src/store/modules/auth.ts @@ -0,0 +1,42 @@ +import { defineStore } from "pinia"; +import { ref, computed } from "vue"; +import * as authApi from "../api/auth"; + +const useAuthStore = defineStore("adminAuth", () => { + const status = ref(""); + const token = ref(localStorage.getItem("token") || ""); + const currentUser = ref(localStorage.getItem("user") || ""); + const isAdmin = ref(localStorage.getItem("admin") === "true"); + const isLoggedIn = computed(() => !!token.value); + + const getLoginToken = async (userId: string) => { + try { + const resp = await authApi.getToken(userId); + return resp.data.token; + } catch (error) { + status.value = "error"; + throw error; + } + }; + + const logout = () => { + status.value = ""; + token.value = ""; + currentUser.value = ""; + + localStorage.removeItem("token"); + localStorage.removeItem("user"); + }; + + return { + status, + token, + currentUser, + isAdmin, + isLoggedIn, + getLoginToken, + logout, + }; +}); + +export default useAuthStore; diff --git a/ui/admin/src/store/modules/devices.ts b/ui/admin/src/store/modules/devices.ts new file mode 100644 index 00000000000..fdb0999e815 --- /dev/null +++ b/ui/admin/src/store/modules/devices.ts @@ -0,0 +1,59 @@ +import { defineStore } from "pinia"; +import { ref } from "vue"; +import { IAdminDevice } from 
"@admin/interfaces/IDevice"; +import * as devicesApi from "../api/devices"; + +const useDevicesStore = defineStore("adminDevices", () => { + const devices = ref([]); + const deviceCount = ref(0); + const device = ref({} as IAdminDevice); + + const currentFilter = ref(""); + const currentSortField = ref(undefined); + const currentSortOrder = ref<"asc" | "desc" | undefined>(undefined); + + const setFilter = (filter: string) => { currentFilter.value = filter || ""; }; + const setSort = (field?: string, order?: "asc" | "desc") => { + currentSortField.value = field; + currentSortOrder.value = order; + }; + + const fetchDeviceList = async (data?: { + page?: number; + perPage?: number; + filter?: string; + sortField?: string; + sortOrder?: "asc" | "desc"; + }) => { + const page = data?.page || 1; + const perPage = data?.perPage || 10; + const filter = data?.filter ?? currentFilter.value ?? ""; + const sortField = data?.sortField ?? currentSortField.value; + const sortOrder = data?.sortOrder ?? 
currentSortOrder.value; + + const res = await devicesApi.getDevices(page, perPage, filter, sortField, sortOrder); + devices.value = res.data as unknown as IAdminDevice[]; + deviceCount.value = parseInt(res.headers["x-total-count"] as string, 10); + }; + + const fetchDeviceById = async (uid: string) => { + const res = await devicesApi.getDevice(uid); + device.value = res.data as IAdminDevice; + }; + + return { + devices, + deviceCount, + device, + currentFilter, + currentSortField, + currentSortOrder, + setFilter, + setSort, + + fetchDeviceList, + fetchDeviceById, + }; +}); + +export default useDevicesStore; diff --git a/ui/admin/src/store/modules/firewall_rules.ts b/ui/admin/src/store/modules/firewall_rules.ts new file mode 100644 index 00000000000..1e6e500ccc9 --- /dev/null +++ b/ui/admin/src/store/modules/firewall_rules.ts @@ -0,0 +1,31 @@ +import { defineStore } from "pinia"; +import { ref } from "vue"; +import { IAdminFirewallRule } from "@admin/interfaces/IFirewallRule"; +import * as firewallRulesApi from "../api/firewall_rules"; + +const useFirewallRulesStore = defineStore("adminFirewallRules", () => { + const firewallRules = ref>([]); + const firewallRulesCount = ref(0); + const firewallRule = ref({} as IAdminFirewallRule); + + const fetchFirewallRulesList = async (data?: { page: number; perPage: number }) => { + const res = await firewallRulesApi.fetchFirewalls(data?.page || 1, data?.perPage || 10); + firewallRules.value = res.data as IAdminFirewallRule[]; + firewallRulesCount.value = parseInt(res.headers["x-total-count"] as string, 10); + }; + + const fetchFirewallRuleById = async (uid: string) => { + const res = await firewallRulesApi.getFirewall(uid); + firewallRule.value = res.data as IAdminFirewallRule; + }; + + return { + firewallRules, + firewallRulesCount, + firewallRule, + fetchFirewallRulesList, + fetchFirewallRuleById, + }; +}); + +export default useFirewallRulesStore; diff --git a/ui/admin/src/store/modules/instance.ts 
b/ui/admin/src/store/modules/instance.ts new file mode 100644 index 00000000000..eb959993cc1 --- /dev/null +++ b/ui/admin/src/store/modules/instance.ts @@ -0,0 +1,58 @@ +import { defineStore } from "pinia"; +import { ref, computed } from "vue"; +import { IAdminAuth, IAdminUpdateSAML } from "@admin/interfaces/IInstance"; +import * as instanceApi from "../api/instance"; + +const useInstanceStore = defineStore("adminInstance", () => { + const authenticationSettings = ref({ + local: { + enabled: false, + }, + saml: { + enabled: false, + auth_url: "", + assertion_url: "", + idp: { + entity_id: "", + binding: { + post: "", + redirect: "", + }, + certificates: [], + }, + sp: { + sign_auth_requests: false, + certificate: "", + }, + }, + }); + + const isLocalAuthEnabled = computed(() => authenticationSettings.value?.local?.enabled); + const isSamlEnabled = computed(() => authenticationSettings.value?.saml?.enabled); + + const fetchAuthenticationSettings = async () => { + const response = await instanceApi.getAuthenticationSettings(); + authenticationSettings.value = response.data as IAdminAuth; + }; + + const updateLocalAuthentication = async (status: boolean) => { + await instanceApi.configureLocalAuthentication(status); + await fetchAuthenticationSettings(); + }; + + const updateSamlAuthentication = async (data: IAdminUpdateSAML) => { + await instanceApi.configureSAMLAuthentication(data); + await fetchAuthenticationSettings(); + }; + + return { + authenticationSettings, + isLocalAuthEnabled, + isSamlEnabled, + fetchAuthenticationSettings, + updateLocalAuthentication, + updateSamlAuthentication, + }; +}); + +export default useInstanceStore; diff --git a/ui/admin/src/store/modules/license.ts b/ui/admin/src/store/modules/license.ts new file mode 100644 index 00000000000..e4dbc5916d4 --- /dev/null +++ b/ui/admin/src/store/modules/license.ts @@ -0,0 +1,28 @@ +import { defineStore } from "pinia"; +import { ref, computed } from "vue"; +import { IAdminLicense } from 
"@admin/interfaces/ILicense"; +import * as apiLicense from "../api/license"; + +const useLicenseStore = defineStore("adminLicense", () => { + const license = ref({} as IAdminLicense); + const isExpired = computed(() => (license.value && license.value.expired) + || (license.value && license.value.expired === undefined)); + + const getLicense = async () => { + const res = await apiLicense.getLicense(); + license.value = res.data as IAdminLicense; + }; + + const uploadLicense = async (file: File) => { + await apiLicense.uploadLicense(file); + }; + + return { + license, + isExpired, + getLicense, + uploadLicense, + }; +}); + +export default useLicenseStore; diff --git a/ui/admin/src/store/modules/namespaces.ts b/ui/admin/src/store/modules/namespaces.ts new file mode 100644 index 00000000000..3885e7cf66b --- /dev/null +++ b/ui/admin/src/store/modules/namespaces.ts @@ -0,0 +1,58 @@ +import { defineStore } from "pinia"; +import { ref } from "vue"; +import { IAdminNamespace } from "@admin/interfaces/INamespace"; +import * as namespacesApi from "../api/namespaces"; + +const useNamespacesStore = defineStore("adminNamespaces", () => { + const namespaces = ref([]); + const namespaceCount = ref(0); + const namespace = ref({} as IAdminNamespace); + + const currentFilter = ref(""); + + const setFilter = (filter: string) => { + currentFilter.value = filter || ""; + }; + + const fetchNamespaceList = async (data?: { page?: number; perPage?: number; filter?: string }) => { + const page = data?.page || 1; + const perPage = data?.perPage || 10; + const filter = data?.filter ?? currentFilter.value ?? 
""; + const res = await namespacesApi.fetchNamespaces(page, perPage, filter); + namespaces.value = res.data as IAdminNamespace[]; + namespaceCount.value = parseInt(res.headers["x-total-count"] as string, 10); + }; + + const fetchNamespaceById = async (id: string) => { + const { data } = await namespacesApi.getNamespace(id); + namespace.value = data as IAdminNamespace; + }; + + const exportNamespacesToCsv = async (filter: string) => { + const { data } = await namespacesApi.exportNamespaces(filter); + return data; + }; + + const deleteNamespace = async (tenant: string) => { + await namespacesApi.deleteNamespace(tenant); + }; + + const updateNamespace = async (data: IAdminNamespace) => { + await namespacesApi.updateNamespace(data); + }; + + return { + namespaces, + namespaceCount, + namespace, + currentFilter, + setFilter, + fetchNamespaceList, + fetchNamespaceById, + exportNamespacesToCsv, + deleteNamespace, + updateNamespace, + }; +}); + +export default useNamespacesStore; diff --git a/ui/admin/src/store/modules/sessions.ts b/ui/admin/src/store/modules/sessions.ts new file mode 100644 index 00000000000..dcd613121bd --- /dev/null +++ b/ui/admin/src/store/modules/sessions.ts @@ -0,0 +1,32 @@ +import { defineStore } from "pinia"; +import { ref } from "vue"; +import { IAdminSession } from "@admin/interfaces/ISession"; +import * as sessionsApi from "../api/sessions"; + +const useSessionsStore = defineStore("adminSessions", () => { + const sessions = ref>([]); + const sessionCount = ref(0); + const session = ref({} as IAdminSession); + + const fetchSessionList = async (data: { perPage: number; page: number }) => { + const res = await sessionsApi.fetchSessions(data.perPage, data.page); + + sessions.value = res.data as Array; + sessionCount.value = parseInt(res.headers["x-total-count"] as string, 10); + }; + + const fetchSessionById = async (uid: string) => { + const { data } = await sessionsApi.getSession(uid); + session.value = data as IAdminSession; + }; + + return { + 
sessions, + sessionCount, + session, + fetchSessionList, + fetchSessionById, + }; +}); + +export default useSessionsStore; diff --git a/ui/admin/src/store/modules/stats.ts b/ui/admin/src/store/modules/stats.ts new file mode 100644 index 00000000000..9d8cdc12bfd --- /dev/null +++ b/ui/admin/src/store/modules/stats.ts @@ -0,0 +1,16 @@ +import { defineStore } from "pinia"; +import { ref } from "vue"; +import { IAdminStats } from "@admin/interfaces/IStats"; +import getAdminStats from "../api/stats"; + +const useStatsStore = defineStore("adminStats", () => { + const stats = ref({} as IAdminStats); + const getStats = async () => { + const { data } = await getAdminStats(); + stats.value = data as IAdminStats; + }; + + return { stats, getStats }; +}); + +export default useStatsStore; diff --git a/ui/admin/src/store/modules/users.ts b/ui/admin/src/store/modules/users.ts new file mode 100644 index 00000000000..2fea5a9220a --- /dev/null +++ b/ui/admin/src/store/modules/users.ts @@ -0,0 +1,69 @@ +import { defineStore } from "pinia"; +import { ref } from "vue"; +import { IAdminUser, IAdminUserFormData } from "@admin/interfaces/IUser"; +import * as usersApi from "../api/users"; + +const useUsersStore = defineStore("adminUsers", () => { + const users = ref([]); + const usersCount = ref(0); + const user = ref({} as IAdminUser); + + const currentFilter = ref(""); + + const setFilter = (filter: string) => { + currentFilter.value = filter || ""; + }; + + const fetchUsersList = async (data?: { page?: number; perPage?: number; filter?: string }) => { + const filter = data?.filter ?? currentFilter.value ?? 
""; + const res = await usersApi.fetchUsers(data?.page || 1, data?.perPage || 10, filter); + + users.value = res.data as IAdminUser[]; + usersCount.value = parseInt(res.headers["x-total-count"] as string, 10); + }; + + const exportUsersToCsv = async (filter: string) => { + const { data } = await usersApi.exportUsers(filter); + return data; + }; + + const addUser = async (data: IAdminUserFormData) => { + await usersApi.addUser(data); + }; + + const fetchUserById = async (id: string) => { + const { data } = await usersApi.getUser(id); + user.value = data as IAdminUser; + }; + + const updateUser = async (data: IAdminUserFormData) => { + const { id } = data; + await usersApi.updateUser(id as string, data); + }; + + const deleteUser = async (id: string) => { + await usersApi.deleteUser(id); + }; + + const resetUserPassword = async (id: string) => { + const { data } = await usersApi.resetUserPassword(id); + return data as string; + }; + + return { + users, + usersCount, + user, + currentFilter, + setFilter, + fetchUsersList, + exportUsersToCsv, + addUser, + fetchUserById, + updateUser, + deleteUser, + resetUserPassword, + }; +}); + +export default useUsersStore; diff --git a/ui/admin/src/styles/_variables.scss b/ui/admin/src/styles/_variables.scss new file mode 100644 index 00000000000..77091eda612 --- /dev/null +++ b/ui/admin/src/styles/_variables.scss @@ -0,0 +1,2 @@ +// Place SASS variable overrides here +// $font-size-root: 18px; diff --git a/ui/admin/src/styles/generic/_index.scss b/ui/admin/src/styles/generic/_index.scss new file mode 100644 index 00000000000..370cf176bd2 --- /dev/null +++ b/ui/admin/src/styles/generic/_index.scss @@ -0,0 +1,9 @@ +@import './reset.scss'; + +@import './animations.scss'; + +@import './colors.scss'; + +@import './elevation.scss'; + +@import './transitions.scss'; \ No newline at end of file diff --git a/ui/admin/src/views/AnnouncementDetails.vue b/ui/admin/src/views/AnnouncementDetails.vue new file mode 100644 index 
00000000000..444f5c9dfa6 --- /dev/null +++ b/ui/admin/src/views/AnnouncementDetails.vue @@ -0,0 +1,178 @@ + + + + + diff --git a/ui/admin/src/views/Announcements.vue b/ui/admin/src/views/Announcements.vue new file mode 100644 index 00000000000..262923daf6d --- /dev/null +++ b/ui/admin/src/views/Announcements.vue @@ -0,0 +1,33 @@ + + + diff --git a/ui/admin/src/views/Dashboard.vue b/ui/admin/src/views/Dashboard.vue new file mode 100644 index 00000000000..865d0eee570 --- /dev/null +++ b/ui/admin/src/views/Dashboard.vue @@ -0,0 +1,135 @@ + + + diff --git a/ui/admin/src/views/Device.vue b/ui/admin/src/views/Device.vue new file mode 100644 index 00000000000..0e13495f1a8 --- /dev/null +++ b/ui/admin/src/views/Device.vue @@ -0,0 +1,55 @@ + + + diff --git a/ui/admin/src/views/DeviceDetails.vue b/ui/admin/src/views/DeviceDetails.vue new file mode 100644 index 00000000000..069aaab8881 --- /dev/null +++ b/ui/admin/src/views/DeviceDetails.vue @@ -0,0 +1,222 @@ + + + + + diff --git a/ui/admin/src/views/FirewallRules.vue b/ui/admin/src/views/FirewallRules.vue new file mode 100644 index 00000000000..f8357464edd --- /dev/null +++ b/ui/admin/src/views/FirewallRules.vue @@ -0,0 +1,16 @@ + + + diff --git a/ui/admin/src/views/FirewallRulesDetails.vue b/ui/admin/src/views/FirewallRulesDetails.vue new file mode 100644 index 00000000000..af94bcd19d8 --- /dev/null +++ b/ui/admin/src/views/FirewallRulesDetails.vue @@ -0,0 +1,152 @@ + + + + + diff --git a/ui/admin/src/views/NamespaceDetails.vue b/ui/admin/src/views/NamespaceDetails.vue new file mode 100644 index 00000000000..2563931221a --- /dev/null +++ b/ui/admin/src/views/NamespaceDetails.vue @@ -0,0 +1,346 @@ + + + + + diff --git a/ui/admin/src/views/Namespaces.vue b/ui/admin/src/views/Namespaces.vue new file mode 100644 index 00000000000..7896af173c7 --- /dev/null +++ b/ui/admin/src/views/Namespaces.vue @@ -0,0 +1,52 @@ + + + diff --git a/ui/admin/src/views/NewAnnouncement.vue b/ui/admin/src/views/NewAnnouncement.vue new file mode 
100644 index 00000000000..e86cfc99a44 --- /dev/null +++ b/ui/admin/src/views/NewAnnouncement.vue @@ -0,0 +1,136 @@ + + + + + diff --git a/ui/admin/src/views/SessionDetails.vue b/ui/admin/src/views/SessionDetails.vue new file mode 100644 index 00000000000..ee2608ba552 --- /dev/null +++ b/ui/admin/src/views/SessionDetails.vue @@ -0,0 +1,153 @@ + + + + + diff --git a/ui/admin/src/views/Sessions.vue b/ui/admin/src/views/Sessions.vue new file mode 100644 index 00000000000..8379ac9ce7e --- /dev/null +++ b/ui/admin/src/views/Sessions.vue @@ -0,0 +1,15 @@ + + + diff --git a/ui/admin/src/views/Settings.vue b/ui/admin/src/views/Settings.vue new file mode 100644 index 00000000000..a14d0c32cbe --- /dev/null +++ b/ui/admin/src/views/Settings.vue @@ -0,0 +1,5 @@ + diff --git a/ui/admin/src/views/Unauthorized.vue b/ui/admin/src/views/Unauthorized.vue new file mode 100644 index 00000000000..6d3e9ddb892 --- /dev/null +++ b/ui/admin/src/views/Unauthorized.vue @@ -0,0 +1,111 @@ + + + diff --git a/ui/admin/src/views/UserDetails.vue b/ui/admin/src/views/UserDetails.vue new file mode 100644 index 00000000000..4d571779049 --- /dev/null +++ b/ui/admin/src/views/UserDetails.vue @@ -0,0 +1,290 @@ + + + diff --git a/ui/admin/src/views/Users.vue b/ui/admin/src/views/Users.vue new file mode 100644 index 00000000000..8a74ff23bcb --- /dev/null +++ b/ui/admin/src/views/Users.vue @@ -0,0 +1,59 @@ + + + diff --git a/ui/admin/tests/unit/components/Announcement/AnnouncementDelete.spec.ts b/ui/admin/tests/unit/components/Announcement/AnnouncementDelete.spec.ts new file mode 100644 index 00000000000..1ac1b16d669 --- /dev/null +++ b/ui/admin/tests/unit/components/Announcement/AnnouncementDelete.spec.ts @@ -0,0 +1,132 @@ +import { describe, expect, it, beforeEach, vi, afterEach } from "vitest"; +import { DOMWrapper, VueWrapper, flushPromises } from "@vue/test-utils"; +import { mountComponent, mockSnackbar } from "@tests/utils/mount"; +import { createCleanAdminRouter } from "@tests/utils/router"; +import 
{ createAxiosError } from "@tests/utils/axiosError"; +import useAnnouncementStore from "@admin/store/modules/announcement"; +import AnnouncementDelete from "@admin/components/Announcement/AnnouncementDelete.vue"; +import { Router } from "vue-router"; + +const triggerButtonTemplate = ` + +`; + +describe("AnnouncementDelete", () => { + let wrapper: VueWrapper>; + let announcementsStore: ReturnType; + let router: Router; + const mockUuid = "announcement-123"; + + const mountWrapper = (props: { redirect?: boolean } = {}) => { + router = createCleanAdminRouter(); + + wrapper = mountComponent(AnnouncementDelete, { + global: { plugins: [router] }, + props: { + uuid: mockUuid, + ...props, + }, + slots: { + default: triggerButtonTemplate, + }, + attachTo: document.body, + }); + + announcementsStore = useAnnouncementStore(); + }; + + const openDialog = async () => { + await wrapper.find('[data-test="trigger-button"]').trigger("click"); + return new DOMWrapper(document.body).find('[role="dialog"]'); + }; + + afterEach(() => { + vi.clearAllMocks(); + wrapper?.unmount(); + document.body.innerHTML = ""; + }); + + describe("rendering", () => { + beforeEach(() => mountWrapper()); + + it("renders the slot content", () => { + const trigger = wrapper.find('[data-test="trigger-button"]'); + expect(trigger.exists()).toBe(true); + expect(trigger.text()).toBe("Delete"); + }); + + it("does not show the dialog initially", () => { + expect(new DOMWrapper(document.body).find('[role="dialog"]').exists()).toBe(false); + }); + }); + + describe("opening dialog", () => { + beforeEach(() => mountWrapper()); + + it("shows dialog when clicking the trigger", async () => { + const dialog = await openDialog(); + + expect(dialog.exists()).toBe(true); + expect(dialog.text()).toContain("Are you sure?"); + expect(dialog.text()).toContain("You are about to remove this announcement"); + }); + }); + + describe("deleting announcement", () => { + it("calls store action and shows success message on confirm", 
async () => { + mountWrapper(); + + const dialog = await openDialog(); + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + expect(announcementsStore.deleteAnnouncement).toHaveBeenCalledWith(mockUuid); + expect(mockSnackbar.showSuccess).toHaveBeenCalledWith("Announcement deleted successfully."); + }); + + it("shows error message when delete fails", async () => { + mountWrapper(); + vi.mocked(announcementsStore.deleteAnnouncement).mockRejectedValueOnce( + createAxiosError(500, "Internal Server Error"), + ); + + const dialog = await openDialog(); + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + expect(mockSnackbar.showError).toHaveBeenCalledWith("Failed to delete announcement."); + }); + + it("redirects to announcements page when redirect prop is true", async () => { + mountWrapper({ redirect: true }); + const pushSpy = vi.spyOn(router, "push"); + + const dialog = await openDialog(); + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + expect(pushSpy).toHaveBeenCalledWith({ name: "announcements" }); + }); + + it("emits update event when redirect prop is false", async () => { + mountWrapper({ redirect: false }); + + const dialog = await openDialog(); + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + expect(wrapper.emitted("update")).toBeTruthy(); + }); + }); +}); diff --git a/ui/admin/tests/unit/components/Announcement/AnnouncementEdit.spec.ts b/ui/admin/tests/unit/components/Announcement/AnnouncementEdit.spec.ts new file mode 100644 index 00000000000..9891a8e145f --- /dev/null +++ b/ui/admin/tests/unit/components/Announcement/AnnouncementEdit.spec.ts @@ -0,0 +1,198 @@ +import { describe, expect, it, beforeEach, vi, afterEach } from "vitest"; +import { 
DOMWrapper, VueWrapper, flushPromises } from "@vue/test-utils"; +import { mountComponent, mockSnackbar } from "@tests/utils/mount"; +import useAnnouncementStore from "@admin/store/modules/announcement"; +import AnnouncementEdit from "@admin/components/Announcement/AnnouncementEdit.vue"; +import { mockAnnouncement, mockAnnouncementShort } from "../../mocks"; +import { createAxiosError } from "@tests/utils/axiosError"; + +vi.mock("@tinymce/tinymce-vue", () => ({ + default: { + name: "Editor", + // eslint-disable-next-line vue/max-len + template: '
', + props: ["modelValue", "init", "apiKey"], + }, +})); + +const triggerButtonTemplate = ` + +`; + +describe("AnnouncementEdit", () => { + let wrapper: VueWrapper>; + let announcementsStore: ReturnType; + + const mountWrapper = () => { + wrapper = mountComponent(AnnouncementEdit, { + props: { + announcementItem: mockAnnouncementShort, + showTooltip: false, + }, + slots: { default: triggerButtonTemplate }, + attachTo: document.body, + piniaOptions: { + initialState: { + adminAnnouncement: { announcement: mockAnnouncement }, + }, + }, + }); + + announcementsStore = useAnnouncementStore(); + }; + + const openDialog = async () => { + await wrapper.find('[data-test="trigger-button"]').trigger("click"); + return new DOMWrapper(document.body).find('[role="dialog"]'); + }; + + afterEach(() => { + vi.clearAllMocks(); + wrapper?.unmount(); + document.body.innerHTML = ""; + }); + + describe("rendering", () => { + beforeEach(() => mountWrapper()); + + it("renders the slot content", () => { + const trigger = wrapper.find('[data-test="trigger-button"]'); + expect(trigger.exists()).toBe(true); + expect(trigger.text()).toBe("Edit"); + }); + + it("does not show the dialog initially", () => { + expect(new DOMWrapper(document.body).find('[role="dialog"]').exists()).toBe(false); + }); + }); + + describe("opening dialog", () => { + it("shows dialog and fetches announcement when clicking trigger", async () => { + mountWrapper(); + + const dialog = await openDialog(); + + expect(announcementsStore.fetchAnnouncement).toHaveBeenCalledWith(mockAnnouncementShort.uuid); + expect(dialog.exists()).toBe(true); + expect(dialog.text()).toContain("Edit Announcement"); + }); + + it("displays current announcement title in form", async () => { + mountWrapper(); + + const dialog = await openDialog(); + const titleInput = dialog.find('input[type="text"]'); + expect((titleInput.element as HTMLInputElement).value).toBe(mockAnnouncement.title); + }); + + it("renders content editor", async () => { + 
mountWrapper(); + + const dialog = await openDialog(); + expect(dialog.find('[data-test="editor-mock"]').exists()).toBe(true); + }); + }); + + describe("form validation", () => { + it("shows error when submitting with empty title", async () => { + mountWrapper(); + + const dialog = await openDialog(); + const titleInput = dialog.find('input[type="text"]'); + await titleInput.setValue(""); + await flushPromises(); + + const submitBtn = dialog.find('[data-test="confirm-btn"]'); + await submitBtn.trigger("click"); + await flushPromises(); + + expect(mockSnackbar.showError).toHaveBeenCalledWith("Please fill in all required fields."); + }); + + it("shows error when submitting with empty content", async () => { + mountWrapper(); + + const dialog = await openDialog(); + // Clear content via mocked editor + const editor = dialog.find('[data-test="editor-mock"]'); + await editor.setValue(""); + await flushPromises(); + + const submitBtn = dialog.find('[data-test="confirm-btn"]'); + await submitBtn.trigger("click"); + await flushPromises(); + + expect(dialog.find('[data-test="announcement-error"]').exists()).toBe(true); + expect(dialog.text()).toContain("The announcement cannot be empty!"); + expect(mockSnackbar.showError).toHaveBeenCalledWith("Please fill in all required fields."); + }); + }); + + describe("updating announcement", () => { + it("calls store action and shows success message on submit", async () => { + mountWrapper(); + + const dialog = await openDialog(); + const titleInput = dialog.find('input[type="text"]'); + await titleInput.setValue("Updated Title"); + + // Set content via mocked editor + const editor = dialog.find('[data-test="editor-mock"]'); + await editor.setValue("

Updated Content

"); + await flushPromises(); + + const submitBtn = dialog.find('[data-test="confirm-btn"]'); + await submitBtn.trigger("click"); + await flushPromises(); + + expect(announcementsStore.updateAnnouncement).toHaveBeenCalledWith( + mockAnnouncement.uuid, + expect.objectContaining({ + title: "Updated Title", + content: expect.stringContaining("Updated Content"), + }), + ); + expect(mockSnackbar.showSuccess).toHaveBeenCalledWith("Announcement updated successfully."); + }); + + it("shows error message when update fails", async () => { + mountWrapper(); + vi.mocked(announcementsStore.updateAnnouncement).mockRejectedValueOnce( + createAxiosError(500, "Internal Server Error"), + ); + + const dialog = await openDialog(); + const editor = dialog.find('[data-test="editor-mock"]'); + await editor.setValue("

Updated Content

"); + await flushPromises(); + + const submitBtn = dialog.find('[data-test="confirm-btn"]'); + await submitBtn.trigger("click"); + await flushPromises(); + + expect(mockSnackbar.showError).toHaveBeenCalledWith("Failed to update announcement."); + }); + + it("emits update event after successful update", async () => { + mountWrapper(); + + const dialog = await openDialog(); + const editor = dialog.find('[data-test="editor-mock"]'); + await editor.setValue("

Updated Content

"); + await flushPromises(); + + const submitBtn = dialog.find('[data-test="confirm-btn"]'); + await submitBtn.trigger("click"); + await flushPromises(); + + expect(wrapper.emitted("update")).toBeTruthy(); + }); + }); +}); diff --git a/ui/admin/tests/unit/components/Announcement/AnnouncementList.spec.ts b/ui/admin/tests/unit/components/Announcement/AnnouncementList.spec.ts new file mode 100644 index 00000000000..cf56f4262b0 --- /dev/null +++ b/ui/admin/tests/unit/components/Announcement/AnnouncementList.spec.ts @@ -0,0 +1,146 @@ +import { describe, expect, it, beforeEach, vi, afterEach } from "vitest"; +import { VueWrapper, flushPromises } from "@vue/test-utils"; +import { mountComponent, mockSnackbar } from "@tests/utils/mount"; +import { createCleanAdminRouter } from "@tests/utils/router"; +import { createAxiosError } from "@tests/utils/axiosError"; +import useAnnouncementStore from "@admin/store/modules/announcement"; +import AnnouncementList from "@admin/components/Announcement/AnnouncementList.vue"; +import { mockAnnouncements } from "../../mocks"; +import { Router } from "vue-router"; + +describe("AnnouncementList", () => { + let wrapper: VueWrapper>; + let router: Router; + let announcementsStore: ReturnType; + + const mountWrapper = (mockAnnouncementCount?: number) => { + router = createCleanAdminRouter(); + + wrapper = mountComponent(AnnouncementList, { + global: { plugins: [router] }, + piniaOptions: { + initialState: { + adminAnnouncement: { + announcements: mockAnnouncements, + announcementCount: mockAnnouncementCount ?? 
mockAnnouncements.length, + }, + }, + }, + }); + + announcementsStore = useAnnouncementStore(); + }; + + afterEach(() => { + vi.clearAllMocks(); + wrapper?.unmount(); + }); + + describe("rendering", () => { + beforeEach(() => mountWrapper()); + + it("renders the data table", () => { + expect(wrapper.find('[data-test="announcement-list"]').exists()).toBe(true); + }); + + it("displays announcement UUIDs", () => { + const uuids = wrapper.findAll('[data-test="announcement-uuid"]'); + expect(uuids).toHaveLength(mockAnnouncements.length); + expect(uuids[0].text()).toContain(mockAnnouncements[0].uuid); + }); + + it("displays announcement titles", () => { + const titles = wrapper.findAll('[data-test="announcement-title"]'); + expect(titles).toHaveLength(mockAnnouncements.length); + expect(titles[0].text()).toBe(mockAnnouncements[0].title); + expect(titles[1].text()).toBe(mockAnnouncements[1].title); + }); + + it("displays action buttons for each announcement", () => { + const actionCells = wrapper.findAll('[data-test="announcement-actions"]'); + expect(actionCells).toHaveLength(mockAnnouncements.length); + }); + + it("displays edit buttons", () => { + const editButtons = wrapper.findAll('[data-test="edit-button"]'); + expect(editButtons).toHaveLength(mockAnnouncements.length); + }); + + it("displays delete buttons", () => { + const deleteButtons = wrapper.findAll('[data-test="delete-button"]'); + expect(deleteButtons).toHaveLength(mockAnnouncements.length); + }); + }); + + describe("fetching announcements", () => { + it("fetches announcements on mount", () => { + mountWrapper(); + + expect(announcementsStore.fetchAnnouncementList).toHaveBeenCalledWith( + expect.objectContaining({ + perPage: 10, + page: 1, + orderBy: "desc", + }), + ); + }); + + it("refetches announcements when page changes", async () => { + mountWrapper(11); // Mock total count to 11 to enable pagination + + // Click next page button + const nextPageBtn = wrapper.find('[data-test="pager-next"]'); + await 
nextPageBtn.trigger("click"); + await flushPromises(); + + expect(announcementsStore.fetchAnnouncementList).toHaveBeenCalledWith( + expect.objectContaining({ + page: 2, + }), + ); + }); + + it("refetches announcements when items per page changes", async () => { + mountWrapper(20); + + // Change items per page via combobox + const ippCombo = wrapper.find('[data-test="ipp-combo"] input'); + await ippCombo.setValue(20); + await flushPromises(); + + expect(announcementsStore.fetchAnnouncementList).toHaveBeenCalledWith( + expect.objectContaining({ + perPage: 20, + }), + ); + }); + }); + + describe("navigating to announcement details", () => { + it("navigates when clicking info icon", async () => { + mountWrapper(); + + const pushSpy = vi.spyOn(router, "push"); + const infoIcon = wrapper.findAll('[data-test="info-button"]')[0]; + + await infoIcon.trigger("click"); + + expect(pushSpy).toHaveBeenCalledWith({ + name: "announcementDetails", + params: { uuid: mockAnnouncements[0].uuid }, + }); + }); + }); + + describe("error handling", () => { + it("shows error snackbar when fetching announcements fails", async () => { + mountWrapper(); + vi.mocked(announcementsStore.fetchAnnouncementList).mockRejectedValueOnce( + createAxiosError(500, "Internal Server Error"), + ); + await flushPromises(); + + expect(mockSnackbar.showError).toHaveBeenCalledWith("Failed to fetch announcements."); + }); + }); +}); diff --git a/ui/admin/tests/unit/components/Device/DeviceList.spec.ts b/ui/admin/tests/unit/components/Device/DeviceList.spec.ts new file mode 100644 index 00000000000..71fd0db682d --- /dev/null +++ b/ui/admin/tests/unit/components/Device/DeviceList.spec.ts @@ -0,0 +1,175 @@ +import { describe, expect, it, beforeEach, vi, afterEach } from "vitest"; +import { VueWrapper, flushPromises } from "@vue/test-utils"; +import { mountComponent, mockSnackbar } from "@tests/utils/mount"; +import { createCleanAdminRouter } from "@tests/utils/router"; +import { createAxiosError } from 
"@tests/utils/axiosError"; +import useDevicesStore from "@admin/store/modules/devices"; +import DeviceList from "@admin/components/Device/DeviceList.vue"; +import { mockDevices } from "../../mocks"; +import { Router } from "vue-router"; + +describe("DeviceList", () => { + let wrapper: VueWrapper>; + let router: Router; + let devicesStore: ReturnType; + + const mountWrapper = (mockDeviceCount?: number) => { + router = createCleanAdminRouter(); + + wrapper = mountComponent(DeviceList, { + global: { plugins: [router] }, + piniaOptions: { + initialState: { + adminDevices: { + devices: mockDevices, + deviceCount: mockDeviceCount ?? mockDevices.length, + }, + }, + }, + }); + + devicesStore = useDevicesStore(); + }; + + afterEach(() => { + vi.clearAllMocks(); + wrapper?.unmount(); + }); + + describe("rendering", () => { + beforeEach(() => mountWrapper()); + + it("renders the data table", () => { + expect(wrapper.find('[data-test="devices-list"]').exists()).toBe(true); + }); + + it("displays device names", () => { + expect(wrapper.text()).toContain(mockDevices[0].name); + expect(wrapper.text()).toContain(mockDevices[1].name); + }); + + it("displays device info", () => { + expect(wrapper.text()).toContain(mockDevices[0].info.pretty_name); + expect(wrapper.text()).toContain(mockDevices[1].info.pretty_name); + }); + + it("displays device namespaces", () => { + const namespaceLinks = wrapper.findAll('[data-test="namespace-link"]'); + expect(namespaceLinks).toHaveLength(mockDevices.length); + expect(namespaceLinks[0].text()).toBe(mockDevices[0].namespace); + }); + + it("displays online status icons", () => { + const onlineIcons = wrapper.findAll('[data-test="success-icon"]'); + const offlineIcons = wrapper.findAll('[data-test="error-icon"]'); + + const onlineCount = mockDevices.filter((d) => d.online).length; + const offlineCount = mockDevices.filter((d) => !d.online).length; + + expect(onlineIcons).toHaveLength(onlineCount); + expect(offlineIcons).toHaveLength(offlineCount); + 
}); + + it("displays device status", () => { + expect(wrapper.text()).toContain(mockDevices[0].status); + }); + + it("displays info buttons for each device", () => { + const infoButtons = wrapper.findAll('[data-test="info-button"]'); + expect(infoButtons).toHaveLength(mockDevices.length); + }); + }); + + describe("fetching devices", () => { + it("fetches devices on mount", () => { + mountWrapper(); + + expect(devicesStore.fetchDeviceList).toHaveBeenCalledWith( + expect.objectContaining({ + perPage: 10, + page: 1, + }), + ); + }); + + it("refetches devices when page changes", async () => { + mountWrapper(11); // Mock total count to 11 to enable pagination + + // Click next page button + const nextPageBtn = wrapper.find('[data-test="pager-next"]'); + await nextPageBtn.trigger("click"); + await flushPromises(); + + expect(devicesStore.fetchDeviceList).toHaveBeenCalledWith( + expect.objectContaining({ + page: 2, + }), + ); + }); + + it("refetches devices when items per page changes", async () => { + mountWrapper(20); + + // Change items per page via combobox + const ippCombo = wrapper.find('[data-test="ipp-combo"] input'); + await ippCombo.setValue(20); + await flushPromises(); + + expect(devicesStore.fetchDeviceList).toHaveBeenCalledWith( + expect.objectContaining({ + perPage: 20, + }), + ); + }); + }); + + describe("navigating to device details", () => { + it("navigates when clicking info button", async () => { + mountWrapper(); + + const pushSpy = vi.spyOn(router, "push"); + const infoButton = wrapper.findAll('[data-test="info-button"]')[0]; + + await infoButton.trigger("click"); + + expect(pushSpy).toHaveBeenCalledWith({ + name: "deviceDetails", + params: { id: mockDevices[0].uid }, + }); + }); + }); + + describe("navigating to namespace details", () => { + it("navigates when clicking namespace link", async () => { + mountWrapper(); + + const pushSpy = vi.spyOn(router, "push"); + const namespaceLink = wrapper.findAll('[data-test="namespace-link"]')[0]; + + await 
namespaceLink.trigger("click"); + + expect(pushSpy).toHaveBeenCalledWith( + expect.objectContaining({ + name: "namespaceDetails", + params: { id: mockDevices[0].tenant_id }, + }), + ); + }); + }); + + describe("error handling", () => { + it("shows error snackbar when fetching devices fails", async () => { + mountWrapper(11); + vi.mocked(devicesStore.fetchDeviceList).mockRejectedValueOnce( + createAxiosError(500, "Internal Server Error"), + ); + + // Trigger refetch by changing page + const nextPageBtn = wrapper.find('[data-test="pager-next"]'); + await nextPageBtn.trigger("click"); + await flushPromises(); + + expect(mockSnackbar.showError).toHaveBeenCalledWith("Failed to fetch devices."); + }); + }); +}); diff --git a/ui/admin/tests/unit/components/FirewallRules/FirewallRulesList.spec.ts b/ui/admin/tests/unit/components/FirewallRules/FirewallRulesList.spec.ts new file mode 100644 index 00000000000..98b2f6517b1 --- /dev/null +++ b/ui/admin/tests/unit/components/FirewallRules/FirewallRulesList.spec.ts @@ -0,0 +1,151 @@ +import { describe, expect, it, beforeEach, vi, afterEach } from "vitest"; +import { VueWrapper, flushPromises } from "@vue/test-utils"; +import { mountComponent, mockSnackbar } from "@tests/utils/mount"; +import { createCleanAdminRouter } from "@tests/utils/router"; +import { createAxiosError } from "@tests/utils/axiosError"; +import useFirewallRulesStore from "@admin/store/modules/firewall_rules"; +import FirewallRulesList from "@admin/components/FirewallRules/FirewallRulesList.vue"; +import { mockFirewallRules } from "../../mocks"; +import { Router } from "vue-router"; + +describe("FirewallRulesList", () => { + let wrapper: VueWrapper>; + let router: Router; + let firewallRulesStore: ReturnType; + + const mountWrapper = (mockFirewallRulesCount?: number) => { + router = createCleanAdminRouter(); + + wrapper = mountComponent(FirewallRulesList, { + global: { plugins: [router] }, + piniaOptions: { + initialState: { + adminFirewallRules: { + 
firewallRules: mockFirewallRules, + firewallRulesCount: mockFirewallRulesCount ?? mockFirewallRules.length, + }, + }, + }, + }); + + firewallRulesStore = useFirewallRulesStore(); + }; + + afterEach(() => { + vi.clearAllMocks(); + wrapper?.unmount(); + }); + + describe("rendering", () => { + beforeEach(() => mountWrapper()); + + it("renders the data table", () => { + expect(wrapper.find('[data-test="firewall-rules-list"]').exists()).toBe(true); + }); + + it("displays firewall rule tenant IDs", () => { + expect(wrapper.text()).toContain(mockFirewallRules[0].tenant_id); + expect(wrapper.text()).toContain(mockFirewallRules[1].tenant_id); + }); + + it("displays firewall rule priorities", () => { + expect(wrapper.text()).toContain(mockFirewallRules[0].priority.toString()); + expect(wrapper.text()).toContain(mockFirewallRules[1].priority.toString()); + }); + + it("displays firewall rule actions", () => { + expect(wrapper.text()).toContain(mockFirewallRules[0].action); + expect(wrapper.text()).toContain(mockFirewallRules[1].action); + }); + + it("displays firewall rule source IPs", () => { + expect(wrapper.text()).toContain(mockFirewallRules[0].source_ip); + expect(wrapper.text()).toContain(mockFirewallRules[1].source_ip); + }); + + it("displays firewall rule usernames", () => { + expect(wrapper.text()).toContain(mockFirewallRules[0].username); + expect(wrapper.text()).toContain(mockFirewallRules[1].username); + }); + + it("displays info buttons for each firewall rule", () => { + const infoButtons = wrapper.findAll('[data-test="info-button"]'); + expect(infoButtons).toHaveLength(mockFirewallRules.length); + }); + }); + + describe("fetching firewall rules", () => { + it("fetches firewall rules on mount", () => { + mountWrapper(); + + expect(firewallRulesStore.fetchFirewallRulesList).toHaveBeenCalledWith( + expect.objectContaining({ + perPage: 10, + page: 1, + }), + ); + }); + + it("refetches firewall rules when page changes", async () => { + mountWrapper(11); // Mock total 
count to 11 to enable pagination + + // Click next page button + const nextPageBtn = wrapper.find('[data-test="pager-next"]'); + await nextPageBtn.trigger("click"); + await flushPromises(); + + expect(firewallRulesStore.fetchFirewallRulesList).toHaveBeenCalledWith( + expect.objectContaining({ + page: 2, + }), + ); + }); + + it("refetches firewall rules when items per page changes", async () => { + mountWrapper(20); + + // Change items per page via combobox + const ippCombo = wrapper.find('[data-test="ipp-combo"] input'); + await ippCombo.setValue(20); + await flushPromises(); + + expect(firewallRulesStore.fetchFirewallRulesList).toHaveBeenCalledWith( + expect.objectContaining({ + perPage: 20, + }), + ); + }); + }); + + describe("navigating to firewall rule details", () => { + it("navigates when clicking info button", async () => { + mountWrapper(); + + const pushSpy = vi.spyOn(router, "push"); + const infoButton = wrapper.findAll('[data-test="info-button"]')[0]; + + await infoButton.trigger("click"); + + expect(pushSpy).toHaveBeenCalledWith({ + name: "firewallRulesDetails", + params: { id: mockFirewallRules[0].id }, + }); + }); + }); + + describe("error handling", () => { + it("shows error snackbar when fetching firewall rules fails", async () => { + mountWrapper(11); + vi.mocked(firewallRulesStore.fetchFirewallRulesList).mockRejectedValueOnce( + createAxiosError(500, "Internal Server Error"), + ); + + // Trigger refetch by changing page + const nextPageBtn = wrapper.find('[data-test="pager-next"]'); + await nextPageBtn.trigger("click"); + await flushPromises(); + + expect(mockSnackbar.showError).toHaveBeenCalledWith("Failed to fetch firewall rules."); + }); + }); +}); diff --git a/ui/admin/tests/unit/components/Instance/ConfigureSSO.spec.ts b/ui/admin/tests/unit/components/Instance/ConfigureSSO.spec.ts new file mode 100644 index 00000000000..c4cf21ddb10 --- /dev/null +++ b/ui/admin/tests/unit/components/Instance/ConfigureSSO.spec.ts @@ -0,0 +1,563 @@ +import { 
describe, it, expect, vi, beforeEach, afterEach } from "vitest"; +import { DOMWrapper, flushPromises, VueWrapper } from "@vue/test-utils"; +import { mountComponent, mockSnackbar } from "@tests/utils/mount"; +import { createAxiosError } from "@tests/utils/axiosError"; +import useInstanceStore from "@admin/store/modules/instance"; +import ConfigureSSO from "@admin/components/Instance/SSO/ConfigureSSO.vue"; + +// Mock certificate from https://mocksaml.com/ +const validCertificate = ` +-----BEGIN CERTIFICATE----- +MIIC4jCCAcoCCQC33wnybT5QZDANBgkqhkiG9w0BAQsFADAyMQswCQYDVQQGEwJV +SzEPMA0GA1UECgwGQm94eUhRMRIwEAYDVQQDDAlNb2NrIFNBTUwwIBcNMjIwMjI4 +MjE0NjM4WhgPMzAyMTA3MDEyMTQ2MzhaMDIxCzAJBgNVBAYTAlVLMQ8wDQYDVQQK +DAZCb3h5SFExEjAQBgNVBAMMCU1vY2sgU0FNTDCCASIwDQYJKoZIhvcNAQEBBQAD +ggEPADCCAQoCggEBALGfYettMsct1T6tVUwTudNJH5Pnb9GGnkXi9Zw/e6x45DD0 +RuRONbFlJ2T4RjAE/uG+AjXxXQ8o2SZfb9+GgmCHuTJFNgHoZ1nFVXCmb/Hg8Hpd +4vOAGXndixaReOiq3EH5XvpMjMkJ3+8+9VYMzMZOjkgQtAqO36eAFFfNKX7dTj3V +pwLkvz6/KFCq8OAwY+AUi4eZm5J57D31GzjHwfjH9WTeX0MyndmnNB1qV75qQR3b +2/W5sGHRv+9AarggJkF+ptUkXoLtVA51wcfYm6hILptpde5FQC8RWY1YrswBWAEZ +NfyrR4JeSweElNHg4NVOs4TwGjOPwWGqzTfgTlECAwEAATANBgkqhkiG9w0BAQsF +AAOCAQEAAYRlYflSXAWoZpFfwNiCQVE5d9zZ0DPzNdWhAybXcTyMf0z5mDf6FWBW +5Gyoi9u3EMEDnzLcJNkwJAAc39Apa4I2/tml+Jy29dk8bTyX6m93ngmCgdLh5Za4 +khuU3AM3L63g7VexCuO7kwkjh/+LqdcIXsVGO6XDfu2QOs1Xpe9zIzLpwm/RNYeX +UjbSj5ce/jekpAw7qyVVL4xOyh8AtUW1ek3wIw1MJvEgEPt0d16oshWJpoS1OT8L +r/22SvYEo3EmSGdTVGgk3x3s+A0qWAqTcyjr7Q4s/GKYRFfomGwz0TZ4Iw1ZN99M +m0eo2USlSRTVl7QHRTuiuSThHpLKQQ== +-----END CERTIFICATE----- +`; + +describe("ConfigureSSO", () => { + let wrapper: VueWrapper>; + let instanceStore: ReturnType; + + const mountWrapper = (isSamlEnabled = false) => { + wrapper = mountComponent(ConfigureSSO, { + props: { modelValue: true }, + attachTo: document.body, + piniaOptions: { + initialState: { + adminInstance: { + authenticationSettings: { + local: { enabled: true }, + saml: { + enabled: isSamlEnabled, + idp: { + entity_id: "", + 
binding: { post: "", redirect: "" }, + certificates: [], + mappings: undefined, + }, + sp: { sign_auth_requests: false }, + }, + }, + }, + }, + }, + }); + + instanceStore = useInstanceStore(); + }; + + const getDialog = () => new DOMWrapper(document.body); + + afterEach(() => { + vi.clearAllMocks(); + wrapper?.unmount(); + }); + + describe("rendering and dialog visibility", () => { + beforeEach(() => mountWrapper()); + + it("renders the dialog when modelValue is true", async () => { + await flushPromises(); + const dialog = getDialog(); + expect(dialog.find('[role="dialog"]').exists()).toBe(true); + }); + + it("displays the dialog title", async () => { + await flushPromises(); + const dialog = getDialog(); + expect(dialog.text()).toContain("Configure Single Sign-on"); + }); + }); + + describe("metadata URL mode", () => { + beforeEach(() => mountWrapper()); + + it("shows metadata URL field when checkbox is enabled", async () => { + await flushPromises(); + const dialog = getDialog(); + + const checkbox = dialog.find('[data-test="checkbox-idp-metadata"] input'); + await checkbox.setValue(true); + await flushPromises(); + + expect(dialog.find('[data-test="idp-metadata-url"]').exists()).toBe(true); + expect(dialog.find('[data-test="idp-manual-section"]').exists()).toBe(false); + }); + + it("disables save button when metadata URL is empty", async () => { + await flushPromises(); + const dialog = getDialog(); + + const checkbox = dialog.find('[data-test="checkbox-idp-metadata"] input'); + await checkbox.setValue(true); + await flushPromises(); + + const saveButton = dialog.find('[data-test="confirm-btn"]'); + expect(saveButton.attributes("disabled")).toBeDefined(); + }); + + it("enables save button when valid metadata URL is provided", async () => { + await flushPromises(); + const dialog = getDialog(); + + const checkbox = dialog.find('[data-test="checkbox-idp-metadata"] input'); + await checkbox.setValue(true); + await flushPromises(); + + const urlField = 
dialog.find('[data-test="idp-metadata-url"] input'); + await urlField.setValue("https://example.com/metadata"); + await flushPromises(); + + const saveButton = dialog.find('[data-test="confirm-btn"]'); + expect(saveButton.attributes("disabled")).toBeUndefined(); + }); + + it("saves configuration with metadata URL", async () => { + await flushPromises(); + const dialog = getDialog(); + + vi.mocked(instanceStore.updateSamlAuthentication).mockResolvedValueOnce(); + + const checkbox = dialog.find('[data-test="checkbox-idp-metadata"] input'); + await checkbox.setValue(true); + await flushPromises(); + + const urlField = dialog.find('[data-test="idp-metadata-url"] input'); + await urlField.setValue("https://example.com/metadata"); + await flushPromises(); + + const saveButton = dialog.find('[data-test="confirm-btn"]'); + await saveButton.trigger("click"); + await flushPromises(); + + expect(instanceStore.updateSamlAuthentication).toHaveBeenCalledWith({ + enable: true, + idp: { metadata_url: "https://example.com/metadata" }, + sp: { sign_requests: false }, + }); + expect(mockSnackbar.showSuccess).toHaveBeenCalledWith("Successfully updated SAML configuration."); + }); + }); + + describe("manual configuration mode", () => { + beforeEach(() => mountWrapper()); + + it("shows manual configuration fields by default", async () => { + await flushPromises(); + const dialog = getDialog(); + + expect(dialog.find('[data-test="idp-manual-section"]').exists()).toBe(true); + expect(dialog.find('[data-test="idp-signon-post-url"]').exists()).toBe(true); + expect(dialog.find('[data-test="idp-signon-redirect-url"]').exists()).toBe(true); + expect(dialog.find('[data-test="idp-entity-id"]').exists()).toBe(true); + expect(dialog.find('[data-test="idp-x509-certificate"]').exists()).toBe(true); + }); + + it("shows warning when no URLs are provided", async () => { + await flushPromises(); + const dialog = getDialog(); + + 
expect(dialog.find('[data-test="manual-config-info"]').exists()).toBe(true); + expect(dialog.text()).toContain("You need to provide at least one of the following URLs"); + }); + + it("disables save button when required fields are empty", async () => { + await flushPromises(); + const dialog = getDialog(); + + const saveButton = dialog.find('[data-test="confirm-btn"]'); + expect(saveButton.attributes("disabled")).toBeDefined(); + }); + + it("enables save button when all required manual fields are filled", async () => { + await flushPromises(); + const dialog = getDialog(); + + const postUrlField = dialog.find('[data-test="idp-signon-post-url"] input'); + await postUrlField.setValue("https://idp.example.com/sso/post"); + await flushPromises(); + + const entityIdField = dialog.find('[data-test="idp-entity-id"] input'); + await entityIdField.setValue("https://idp.example.com"); + await flushPromises(); + + const certField = dialog.find('[data-test="idp-x509-certificate"] textarea'); + await certField.setValue(validCertificate); + await flushPromises(); + + const saveButton = dialog.find('[data-test="confirm-btn"]'); + expect(saveButton.attributes("disabled")).toBeUndefined(); + }); + + it("accepts redirect URL instead of POST URL", async () => { + await flushPromises(); + const dialog = getDialog(); + + const redirectUrlField = dialog.find('[data-test="idp-signon-redirect-url"] input'); + await redirectUrlField.setValue("https://idp.example.com/sso/redirect"); + await flushPromises(); + + const entityIdField = dialog.find('[data-test="idp-entity-id"] input'); + await entityIdField.setValue("https://idp.example.com"); + await flushPromises(); + + const certField = dialog.find('[data-test="idp-x509-certificate"] textarea'); + await certField.setValue(validCertificate); + await flushPromises(); + + const saveButton = dialog.find('[data-test="confirm-btn"]'); + expect(saveButton.attributes("disabled")).toBeUndefined(); + }); + + it("saves configuration with manual 
settings", async () => { + await flushPromises(); + const dialog = getDialog(); + + vi.mocked(instanceStore.updateSamlAuthentication).mockResolvedValueOnce(); + + const postUrlField = dialog.find('[data-test="idp-signon-post-url"] input'); + await postUrlField.setValue("https://idp.example.com/sso/post"); + await flushPromises(); + + const redirectUrlField = dialog.find('[data-test="idp-signon-redirect-url"] input'); + await redirectUrlField.setValue("https://idp.example.com/sso/redirect"); + await flushPromises(); + + const entityIdField = dialog.find('[data-test="idp-entity-id"] input'); + await entityIdField.setValue("https://idp.example.com"); + await flushPromises(); + + const certField = dialog.find('[data-test="idp-x509-certificate"] textarea'); + await certField.setValue(validCertificate); + await flushPromises(); + + const saveButton = dialog.find('[data-test="confirm-btn"]'); + await saveButton.trigger("click"); + await flushPromises(); + + expect(instanceStore.updateSamlAuthentication).toHaveBeenCalledWith({ + enable: true, + idp: { + entity_id: "https://idp.example.com", + binding: { + post: "https://idp.example.com/sso/post", + redirect: "https://idp.example.com/sso/redirect", + }, + certificate: expect.stringContaining("BEGIN CERTIFICATE"), + }, + sp: { sign_requests: false }, + }); + }); + }); + + describe("certificate validation", () => { + beforeEach(() => mountWrapper()); + + it("shows error when certificate is missing BEGIN/END blocks", async () => { + await flushPromises(); + const dialog = getDialog(); + + const certField = dialog.find('[data-test="idp-x509-certificate"] textarea'); + await certField.setValue("INVALIDCERTIFICATEDATA"); + await flushPromises(); + + expect(dialog.text()).toContain("Certificate must include -----BEGIN CERTIFICATE----- and -----END CERTIFICATE----- blocks"); + }); + + it("shows error when certificate format is invalid", async () => { + await flushPromises(); + const dialog = getDialog(); + + const invalidCert = 
`-----BEGIN CERTIFICATE----- +INVALIDCERTIFICATEDATA +-----END CERTIFICATE-----`; + + const certField = dialog.find('[data-test="idp-x509-certificate"] textarea'); + await certField.setValue(invalidCert); + await flushPromises(); + + expect(dialog.text()).toContain("Invalid X.509 certificate"); + }); + + it("accepts valid certificate", async () => { + await flushPromises(); + const dialog = getDialog(); + + const certField = dialog.find('[data-test="idp-x509-certificate"] textarea'); + await certField.setValue(validCertificate); + await flushPromises(); + + // No error message should be displayed + const certFieldWrapper = dialog.find('[data-test="idp-x509-certificate"]'); + expect(certFieldWrapper.text()).not.toContain("Invalid X.509 certificate"); + expect(certFieldWrapper.text()).not.toContain("Certificate must include"); + }); + }); + + describe("advanced settings - SAML mappings", () => { + beforeEach(() => mountWrapper()); + + it("shows SAML mappings table when advanced settings is expanded", async () => { + await flushPromises(); + const dialog = getDialog(); + + const advancedSettings = dialog.find('[data-test="advanced-settings-title"]'); + await advancedSettings.trigger("click"); + await flushPromises(); + + expect(dialog.find('[data-test="saml-mappings-table"]').exists()).toBe(true); + expect(dialog.find('[data-test="add-mapping-btn"]').exists()).toBe(true); + }); + + it("adds a mapping when Add Mapping button is clicked", async () => { + await flushPromises(); + const dialog = getDialog(); + + const advancedSettings = dialog.find('[data-test="advanced-settings-title"]'); + await advancedSettings.trigger("click"); + await flushPromises(); + + const addButton = dialog.find('[data-test="add-mapping-btn"]'); + await addButton.trigger("click"); + await flushPromises(); + + expect(dialog.findAll('[data-test="saml-mapping-key"]')).toHaveLength(1); + }); + + it("allows adding up to 2 mappings", async () => { + await flushPromises(); + const dialog = 
getDialog(); + + const advancedSettings = dialog.find('[data-test="advanced-settings-title"]'); + await advancedSettings.trigger("click"); + await flushPromises(); + + const addButton = dialog.find('[data-test="add-mapping-btn"]'); + await addButton.trigger("click"); + await flushPromises(); + await addButton.trigger("click"); + await flushPromises(); + + expect(dialog.findAll('[data-test="saml-mapping-key"]')).toHaveLength(2); + expect(addButton.attributes("disabled")).toBeDefined(); + }); + + it("removes a mapping when remove button is clicked", async () => { + await flushPromises(); + const dialog = getDialog(); + + const advancedSettings = dialog.find('[data-test="advanced-settings-title"]'); + await advancedSettings.trigger("click"); + await flushPromises(); + + const addButton = dialog.find('[data-test="add-mapping-btn"]'); + await addButton.trigger("click"); + await flushPromises(); + + const removeButton = dialog.find('[data-test="remove-mapping-btn"]'); + await removeButton.trigger("click"); + await flushPromises(); + + expect(dialog.findAll('[data-test="saml-mapping-key"]')).toHaveLength(0); + }); + + it("sends mappings in save request when configured", async () => { + await flushPromises(); + const dialog = getDialog(); + + vi.mocked(instanceStore.updateSamlAuthentication).mockResolvedValueOnce(); + + // Fill required fields + const checkbox = dialog.find('[data-test="checkbox-idp-metadata"] input'); + await checkbox.setValue(true); + await flushPromises(); + + const urlField = dialog.find('[data-test="idp-metadata-url"] input'); + await urlField.setValue("https://example.com/metadata"); + await flushPromises(); + + // Add mappings + const advancedSettings = dialog.find('[data-test="advanced-settings-title"]'); + await advancedSettings.trigger("click"); + await flushPromises(); + + const addButton = dialog.find('[data-test="add-mapping-btn"]'); + await addButton.trigger("click"); + await flushPromises(); + + const mappingValue = 
dialog.find('[data-test="saml-mapping-value"] input'); + await mappingValue.setValue("custom.email"); + await flushPromises(); + + const saveButton = dialog.find('[data-test="confirm-btn"]'); + await saveButton.trigger("click"); + await flushPromises(); + + expect(instanceStore.updateSamlAuthentication).toHaveBeenCalledWith( + expect.objectContaining({ + idp: expect.objectContaining({ + mappings: expect.any(Object), + }), + }), + ); + }); + + it("shows sign request checkbox in advanced settings", async () => { + await flushPromises(); + const dialog = getDialog(); + + const advancedSettings = dialog.find('[data-test="advanced-settings-title"]'); + await advancedSettings.trigger("click"); + await flushPromises(); + + expect(dialog.find('[data-test="sign-request-checkbox"]').exists()).toBe(true); + }); + + it("sends sign_requests setting when enabled", async () => { + await flushPromises(); + const dialog = getDialog(); + + vi.mocked(instanceStore.updateSamlAuthentication).mockResolvedValueOnce(); + + const checkbox = dialog.find('[data-test="checkbox-idp-metadata"] input'); + await checkbox.setValue(true); + await flushPromises(); + + const urlField = dialog.find('[data-test="idp-metadata-url"] input'); + await urlField.setValue("https://example.com/metadata"); + await flushPromises(); + + const advancedSettings = dialog.find('[data-test="advanced-settings-title"]'); + await advancedSettings.trigger("click"); + await flushPromises(); + + const signRequestCheckbox = dialog.find('[data-test="sign-request-checkbox"] input'); + await signRequestCheckbox.setValue(true); + await flushPromises(); + + const saveButton = dialog.find('[data-test="confirm-btn"]'); + await saveButton.trigger("click"); + await flushPromises(); + + expect(instanceStore.updateSamlAuthentication).toHaveBeenCalledWith( + expect.objectContaining({ + sp: { sign_requests: true }, + }), + ); + }); + }); + + describe("editing existing SAML configuration", () => { + it("populates fields when opening with 
existing SAML config", async () => { + wrapper = mountComponent(ConfigureSSO, { + props: { modelValue: false }, + attachTo: document.body, + piniaOptions: { + initialState: { + adminInstance: { + authenticationSettings: { + local: { enabled: true }, + saml: { + enabled: true, + idp: { + entity_id: "https://existing.idp.com", + binding: { + post: "https://existing.idp.com/sso/post", + redirect: "https://existing.idp.com/sso/redirect", + }, + certificates: [validCertificate], + mappings: { + email: "custom.email", + name: "custom.name", + }, + }, + sp: { sign_auth_requests: true }, + }, + }, + }, + }, + }, + }); + + instanceStore = useInstanceStore(); + + // Open dialog + await wrapper.setProps({ modelValue: true }); + await flushPromises(); + + const dialog = getDialog(); + + const postUrlField = dialog.find('[data-test="idp-signon-post-url"] input').element as HTMLInputElement; + const redirectUrlField = dialog.find('[data-test="idp-signon-redirect-url"] input').element as HTMLInputElement; + const entityIdField = dialog.find('[data-test="idp-entity-id"] input').element as HTMLInputElement; + + // Check that fields are populated + expect(postUrlField.value).toBe("https://existing.idp.com/sso/post"); + expect(redirectUrlField.value).toBe("https://existing.idp.com/sso/redirect"); + expect(entityIdField.value).toBe("https://existing.idp.com"); + + // Check advanced settings + const advancedSettings = dialog.find('[data-test="advanced-settings-title"]'); + await advancedSettings.trigger("click"); + await flushPromises(); + + expect(dialog.findAll('[data-test="saml-mapping-key"]')).toHaveLength(2); + }); + }); + + describe("error handling", () => { + beforeEach(() => mountWrapper()); + + it("shows error snackbar when save fails", async () => { + await flushPromises(); + const dialog = getDialog(); + + vi.mocked(instanceStore.updateSamlAuthentication).mockRejectedValueOnce( + createAxiosError(500, "Internal Server Error"), + ); + + const checkbox = 
dialog.find('[data-test="checkbox-idp-metadata"] input');
+      await checkbox.setValue(true);
+      await flushPromises();
+
+      const urlField = dialog.find('[data-test="idp-metadata-url"] input');
+      await urlField.setValue("https://example.com/metadata");
+      await flushPromises();
+
+      const saveButton = dialog.find('[data-test="confirm-btn"]');
+      await saveButton.trigger("click");
+      await flushPromises();
+
+      expect(mockSnackbar.showError).toHaveBeenCalledWith("Failed to update SAML configuration.");
+    });
+  });
+
+  describe("closing dialog", () => {
+    beforeEach(() => mountWrapper());
+
+    it("closes dialog when cancel button is clicked", async () => {
+      await flushPromises();
+      const dialog = getDialog();
+
+      const cancelButton = dialog.find('[data-test="cancel-btn"]');
+      await cancelButton.trigger("click");
+      await flushPromises();
+
+      expect(wrapper.emitted("update:modelValue")).toBeTruthy();
+      expect(wrapper.emitted("update:modelValue")?.[0]).toEqual([false]);
+    });
+  });
+});
diff --git a/ui/admin/tests/unit/components/Namespaces/NamespaceDelete.spec.ts b/ui/admin/tests/unit/components/Namespaces/NamespaceDelete.spec.ts
new file mode 100644
index 00000000000..4fe7d87ffaf
--- /dev/null
+++ b/ui/admin/tests/unit/components/Namespaces/NamespaceDelete.spec.ts
@@ -0,0 +1,156 @@
+import { describe, expect, it, beforeEach, vi, afterEach } from "vitest";
+import { DOMWrapper, VueWrapper, flushPromises } from "@vue/test-utils";
+import { mountComponent, mockSnackbar } from "@tests/utils/mount";
+import { createCleanAdminRouter } from "@tests/utils/router";
+import { createAxiosError } from "@tests/utils/axiosError";
+import useNamespacesStore from "@admin/store/modules/namespaces";
+import NamespaceDelete from "@admin/components/Namespace/NamespaceDelete.vue";
+import { Router } from "vue-router";
+
+describe("NamespaceDelete", () => {
+  let wrapper: VueWrapper<InstanceType<typeof NamespaceDelete>>;
+  let namespacesStore: ReturnType<typeof useNamespacesStore>;
+  let router: Router;
+  const mockTenantId = "tenant-123";
+  const
mockNamespaceName = "test-namespace"; + + const mountWrapper = async (routeName: string = "namespaceDetails") => { + const params = routeName === "namespaceDetails" ? { id: mockTenantId } : {}; + router = createCleanAdminRouter(); + await router.push({ name: routeName, params }); + await router.isReady(); + + wrapper = mountComponent(NamespaceDelete, { + global: { plugins: [router] }, + props: { + tenant: mockTenantId, + name: mockNamespaceName, + modelValue: true, + }, + attachTo: document.body, + }); + + namespacesStore = useNamespacesStore(); + }; + + const getDialog = () => new DOMWrapper(document.body).find('[role="dialog"]'); + + afterEach(() => { + vi.clearAllMocks(); + wrapper?.unmount(); + document.body.innerHTML = ""; + }); + + describe("rendering", () => { + beforeEach(() => mountWrapper()); + + it("shows the dialog when modelValue is true", async () => { + await flushPromises(); + const dialog = getDialog(); + + expect(dialog.exists()).toBe(true); + expect(dialog.text()).toContain("Namespace Deletion"); + }); + + it("displays the namespace name in the content", async () => { + await flushPromises(); + const dialog = getDialog(); + const content = dialog.find('[data-test="content-text"]'); + + expect(content.text()).toContain("This action cannot be undone"); + expect(content.text()).toContain(mockNamespaceName); + }); + + it("shows remove and close buttons", async () => { + await flushPromises(); + const dialog = getDialog(); + + expect(dialog.find('[data-test="remove-btn"]').exists()).toBe(true); + expect(dialog.find('[data-test="close-btn"]').exists()).toBe(true); + }); + }); + + describe("deleting namespace from details page", () => { + beforeEach(() => mountWrapper("namespaceDetails")); + + it("calls store action, shows success, and redirects when confirm is clicked", async () => { + await flushPromises(); + const dialog = getDialog(); + const pushSpy = vi.spyOn(router, "push"); + + const confirmBtn = dialog.find('[data-test="remove-btn"]'); + await 
confirmBtn.trigger("click"); + await flushPromises(); + + expect(namespacesStore.deleteNamespace).toHaveBeenCalledWith(mockTenantId); + expect(mockSnackbar.showSuccess).toHaveBeenCalledWith("Namespace deleted successfully."); + expect(pushSpy).toHaveBeenCalledWith({ name: "namespaces" }); + }); + + it("shows error message when delete fails", async () => { + await flushPromises(); + vi.mocked(namespacesStore.deleteNamespace).mockRejectedValueOnce( + createAxiosError(500, "Internal Server Error"), + ); + + const dialog = getDialog(); + const confirmBtn = dialog.find('[data-test="remove-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + expect(mockSnackbar.showError).toHaveBeenCalledWith("An error occurred while deleting the namespace."); + expect(wrapper.emitted("update:modelValue")?.[0]).toBeUndefined(); + }); + }); + + describe("deleting namespace from list page", () => { + beforeEach(() => mountWrapper("namespaces")); + + it("calls store action, shows success, and emits update when confirm is clicked", async () => { + await flushPromises(); + const dialog = getDialog(); + const pushSpy = vi.spyOn(router, "push"); + + const confirmBtn = dialog.find('[data-test="remove-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + expect(namespacesStore.deleteNamespace).toHaveBeenCalledWith(mockTenantId); + expect(mockSnackbar.showSuccess).toHaveBeenCalledWith("Namespace deleted successfully."); + expect(pushSpy).not.toHaveBeenCalled(); + expect(wrapper.emitted("update")).toBeTruthy(); + expect(wrapper.emitted("update:modelValue")).toBeTruthy(); + }); + + it("does not emit update when delete fails", async () => { + await flushPromises(); + vi.mocked(namespacesStore.deleteNamespace).mockRejectedValueOnce( + createAxiosError(500, "Internal Server Error"), + ); + + const dialog = getDialog(); + const confirmBtn = dialog.find('[data-test="remove-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + 
expect(mockSnackbar.showError).toHaveBeenCalledWith("An error occurred while deleting the namespace."); + expect(wrapper.emitted("update")).toBeUndefined(); + }); + }); + + describe("closing dialog", () => { + beforeEach(() => mountWrapper()); + + it("closes dialog when cancel button is clicked", async () => { + await flushPromises(); + const dialog = getDialog(); + + const cancelBtn = dialog.find('[data-test="close-btn"]'); + await cancelBtn.trigger("click"); + await flushPromises(); + + expect(wrapper.emitted("update:modelValue")).toBeTruthy(); + expect(wrapper.emitted("update:modelValue")?.[0]).toEqual([false]); + }); + }); +}); diff --git a/ui/admin/tests/unit/components/Namespaces/NamespaceEdit.spec.ts b/ui/admin/tests/unit/components/Namespaces/NamespaceEdit.spec.ts new file mode 100644 index 00000000000..385214d512b --- /dev/null +++ b/ui/admin/tests/unit/components/Namespaces/NamespaceEdit.spec.ts @@ -0,0 +1,248 @@ +import { describe, expect, it, beforeEach, vi, afterEach } from "vitest"; +import { DOMWrapper, VueWrapper, flushPromises } from "@vue/test-utils"; +import { mountComponent, mockSnackbar } from "@tests/utils/mount"; +import { createAxiosError } from "@tests/utils/axiosError"; +import useNamespacesStore from "@admin/store/modules/namespaces"; +import NamespaceEdit from "@admin/components/Namespace/NamespaceEdit.vue"; +import { mockNamespace } from "../../mocks"; + +describe("NamespaceEdit", () => { + let wrapper: VueWrapper>; + let namespacesStore: ReturnType; + + const mountWrapper = () => { + wrapper = mountComponent(NamespaceEdit, { + props: { + namespace: mockNamespace, + modelValue: true, + }, + attachTo: document.body, + }); + + namespacesStore = useNamespacesStore(); + }; + + const getDialog = () => new DOMWrapper(document.body).find('[role="dialog"]'); + + afterEach(() => { + vi.clearAllMocks(); + wrapper?.unmount(); + document.body.innerHTML = ""; + }); + + describe("rendering", () => { + beforeEach(() => mountWrapper()); + + it("shows 
the dialog when modelValue is true", async () => { + await flushPromises(); + const dialog = getDialog(); + + expect(dialog.exists()).toBe(true); + expect(dialog.text()).toContain("Edit Namespace"); + }); + + it("displays current namespace values in form", async () => { + await flushPromises(); + const dialog = getDialog(); + + const nameInput = dialog.find('[data-test="name-text"] input'); + expect((nameInput.element as HTMLInputElement).value).toBe(mockNamespace.name); + + const maxDevicesInput = dialog.find('[data-test="maxDevices-text"] input'); + expect((maxDevicesInput.element as HTMLInputElement).value).toBe(mockNamespace.max_devices.toString()); + }); + + it("shows save and cancel buttons", async () => { + await flushPromises(); + const dialog = getDialog(); + + expect(dialog.find('[data-test="confirm-btn"]').exists()).toBe(true); + expect(dialog.find('[data-test="cancel-btn"]').exists()).toBe(true); + }); + }); + + describe("form validation", () => { + beforeEach(() => mountWrapper()); + + it("shows error when name is empty", async () => { + await flushPromises(); + const dialog = getDialog(); + + const nameInput = dialog.find('[data-test="name-text"] input'); + await nameInput.setValue(""); + await flushPromises(); + + expect(dialog.text()).toContain("this is a required field"); + }); + + it("shows error when max devices is below minimum", async () => { + await flushPromises(); + const dialog = getDialog(); + + const maxDevicesInput = dialog.find('[data-test="maxDevices-text"] input'); + await maxDevicesInput.setValue("-2"); + await flushPromises(); + + expect(dialog.text()).toContain("Maximum devices must be -1 (unlimited) or greater"); + }); + + it("accepts -1 for unlimited devices", async () => { + await flushPromises(); + const dialog = getDialog(); + + const maxDevicesInput = dialog.find('[data-test="maxDevices-text"] input'); + await maxDevicesInput.setValue("-1"); + await flushPromises(); + + expect(dialog.text()).not.toContain("Maximum devices 
must be -1"); + }); + + it("disables save button when form has errors", async () => { + await flushPromises(); + const dialog = getDialog(); + + const nameInput = dialog.find('[data-test="name-text"] input'); + await nameInput.setValue(""); + await flushPromises(); + + const saveBtn = dialog.find('[data-test="confirm-btn"]'); + expect(saveBtn.attributes("disabled")).toBeDefined(); + }); + }); + + describe("updating namespace", () => { + it("calls store action with updated values on submit", async () => { + mountWrapper(); + await flushPromises(); + const dialog = getDialog(); + + const nameInput = dialog.find('[data-test="name-text"] input'); + await nameInput.setValue("updated-namespace"); + await flushPromises(); + + const maxDevicesInput = dialog.find('[data-test="maxDevices-text"] input'); + await maxDevicesInput.setValue("42"); + await flushPromises(); + + const saveBtn = dialog.find('[data-test="confirm-btn"]'); + await saveBtn.trigger("click"); + await flushPromises(); + + expect(namespacesStore.updateNamespace).toHaveBeenCalledWith( + expect.objectContaining({ + name: "updated-namespace", + max_devices: 42, + settings: expect.objectContaining({ + session_record: mockNamespace.settings.session_record, + }), + }), + ); + }); + + it("updates session record setting", async () => { + mountWrapper(); + await flushPromises(); + const dialog = getDialog(); + + // Find and toggle session record switch + const sessionRecordSwitch = dialog.find('input[type="checkbox"]'); + const currentValue = (sessionRecordSwitch.element as HTMLInputElement).checked; + await sessionRecordSwitch.setValue(!currentValue); + await flushPromises(); + + const saveBtn = dialog.find('[data-test="confirm-btn"]'); + await saveBtn.trigger("click"); + await flushPromises(); + + expect(namespacesStore.updateNamespace).toHaveBeenCalledWith( + expect.objectContaining({ + settings: expect.objectContaining({ + session_record: !currentValue, + }), + }), + ); + }); + + it("shows success message and 
closes dialog after successful update", async () => { + mountWrapper(); + await flushPromises(); + const dialog = getDialog(); + + const saveBtn = dialog.find('[data-test="confirm-btn"]'); + await saveBtn.trigger("click"); + await flushPromises(); + + expect(mockSnackbar.showSuccess).toHaveBeenCalledWith("Namespace updated successfully."); + expect(wrapper.emitted("update:modelValue")).toBeTruthy(); + expect(wrapper.emitted("update:modelValue")?.[0]).toEqual([false]); + }); + + it("emits update event after successful update", async () => { + mountWrapper(); + await flushPromises(); + const dialog = getDialog(); + + const saveBtn = dialog.find('[data-test="confirm-btn"]'); + await saveBtn.trigger("click"); + await flushPromises(); + + expect(wrapper.emitted("update")).toBeTruthy(); + }); + + it("shows error message when update fails", async () => { + mountWrapper(); + vi.mocked(namespacesStore.updateNamespace).mockRejectedValueOnce( + createAxiosError(500, "Internal Server Error"), + ); + await flushPromises(); + const dialog = getDialog(); + + const saveBtn = dialog.find('[data-test="confirm-btn"]'); + await saveBtn.trigger("click"); + await flushPromises(); + + expect(mockSnackbar.showError).toHaveBeenCalledWith("Failed to update namespace."); + expect(wrapper.emitted("update:modelValue")?.[0]).toBeUndefined(); + }); + }); + + describe("closing dialog", () => { + beforeEach(() => mountWrapper()); + + it("closes dialog when cancel button is clicked", async () => { + await flushPromises(); + const dialog = getDialog(); + + const cancelBtn = dialog.find('[data-test="cancel-btn"]'); + await cancelBtn.trigger("click"); + await flushPromises(); + + expect(wrapper.emitted("update:modelValue")).toBeTruthy(); + expect(wrapper.emitted("update:modelValue")?.[0]).toEqual([false]); + }); + + it("resets form fields when dialog is closed", async () => { + await flushPromises(); + const dialog = getDialog(); + + // Change a field + const nameInput = 
dialog.find('[data-test="name-text"] input'); + await nameInput.setValue("changed-name"); + await flushPromises(); + + // Close dialog + const cancelBtn = dialog.find('[data-test="cancel-btn"]'); + await cancelBtn.trigger("click"); + await flushPromises(); + + // Reopen dialog by setting modelValue to true again + await wrapper.setProps({ modelValue: true }); + await flushPromises(); + + // Check that field is reset to original value + const dialogReopened = getDialog(); + const nameInputReopened = dialogReopened.find('[data-test="name-text"] input'); + expect((nameInputReopened.element as HTMLInputElement).value).toBe(mockNamespace.name); + }); + }); +}); diff --git a/ui/admin/tests/unit/components/Namespaces/NamespaceExport.spec.ts b/ui/admin/tests/unit/components/Namespaces/NamespaceExport.spec.ts new file mode 100644 index 00000000000..6aabd48ad90 --- /dev/null +++ b/ui/admin/tests/unit/components/Namespaces/NamespaceExport.spec.ts @@ -0,0 +1,390 @@ +import { describe, expect, it, beforeEach, vi, afterEach } from "vitest"; +import { DOMWrapper, VueWrapper, flushPromises } from "@vue/test-utils"; +import { mountComponent, mockSnackbar } from "@tests/utils/mount"; +import { createAxiosError } from "@tests/utils/axiosError"; +import { saveAs } from "file-saver"; +import useNamespacesStore from "@admin/store/modules/namespaces"; +import NamespaceExport from "@admin/components/Namespace/NamespaceExport.vue"; + +vi.mock("file-saver", () => ({ + saveAs: vi.fn(), +})); + +describe("NamespaceExport", () => { + let wrapper: VueWrapper>; + let namespacesStore: ReturnType; + + const mountWrapper = () => { + wrapper = mountComponent(NamespaceExport, { + attachTo: document.body, + }); + + namespacesStore = useNamespacesStore(); + }; + + const getDialog = () => new DOMWrapper(document.body).find('[role="dialog"]'); + + afterEach(() => { + vi.clearAllMocks(); + wrapper?.unmount(); + document.body.innerHTML = ""; + }); + + describe("rendering", () => { + beforeEach(() => 
mountWrapper()); + + it("renders the export button", () => { + const exportBtn = wrapper.find('[data-test="namespaces-export-btn"]'); + expect(exportBtn.exists()).toBe(true); + expect(exportBtn.text()).toContain("Export CSV"); + }); + + it("does not show dialog initially", () => { + expect(getDialog().exists()).toBe(false); + }); + }); + + describe("opening dialog", () => { + beforeEach(() => mountWrapper()); + + it("shows dialog when clicking export button", async () => { + const exportBtn = wrapper.find('[data-test="namespaces-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + expect(dialog.exists()).toBe(true); + expect(dialog.text()).toContain("Export namespaces data"); + }); + + it("displays all filter options", async () => { + const exportBtn = wrapper.find('[data-test="namespaces-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + expect(dialog.text()).toContain("Namespaces with more than:"); + expect(dialog.text()).toContain("Namespaces with no devices"); + expect(dialog.text()).toContain("Namespace with devices, but no sessions"); + }); + + it("has 'more than' filter selected by default", async () => { + const exportBtn = wrapper.find('[data-test="namespaces-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const moreThanRadio = dialog.find('[data-test="radio-more-than"] input'); + expect((moreThanRadio.element as HTMLInputElement).checked).toBe(true); + }); + + it("shows number of devices input enabled by default", async () => { + const exportBtn = wrapper.find('[data-test="namespaces-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const numberInput = dialog.find('[data-test="number-of-devices-input"]'); + expect(numberInput.attributes("disabled")).toBeUndefined(); + }); + }); + + describe("filter selection", () => { 
+ beforeEach(() => mountWrapper()); + + it("disables number input when selecting 'no devices' filter", async () => { + const exportBtn = wrapper.find('[data-test="namespaces-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const noDevicesRadio = dialog.find('[data-test="radio-no-devices"] input'); + await noDevicesRadio.setValue(true); + await flushPromises(); + + const numberInput = dialog.find('[data-test="number-of-devices-input"] input'); + expect(numberInput.attributes("disabled")).toBeDefined(); + }); + + it("disables number input when selecting 'no sessions' filter", async () => { + const exportBtn = wrapper.find('[data-test="namespaces-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const noSessionsRadio = dialog.find('[data-test="radio-no-sessions"] input'); + await noSessionsRadio.setValue(true); + await flushPromises(); + + const numberInput = dialog.find('[data-test="number-of-devices-input"] input'); + expect(numberInput.attributes("disabled")).toBeDefined(); + }); + + it("enables number input when selecting 'more than' filter", async () => { + const exportBtn = wrapper.find('[data-test="namespaces-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + + // Select another filter first + const radioNoDevices = dialog.find('[data-test="radio-no-devices"] input'); + await radioNoDevices.setValue(true); + await flushPromises(); + + // Then select "more than" again + const radioMoreThan = dialog.find('[data-test="radio-more-than"] input'); + await radioMoreThan.setValue(true); + await flushPromises(); + + const numberInput = dialog.find('[data-test="number-of-devices-input"] input'); + expect(numberInput.attributes("disabled")).toBeUndefined(); + }); + }); + + describe("form validation", () => { + beforeEach(() => mountWrapper()); + + it("shows error for negative number", async 
() => { + const exportBtn = wrapper.find('[data-test="namespaces-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const numberInput = dialog.find('[data-test="number-of-devices-input"] input'); + await numberInput.setValue("-1"); + await flushPromises(); + + expect(dialog.text()).toContain("this must be greater than or equal to 0"); + }); + + it("accepts zero as valid input", async () => { + const exportBtn = wrapper.find('[data-test="namespaces-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const numberInput = dialog.find('[data-test="number-of-devices-input"] input'); + await numberInput.setValue("0"); + await flushPromises(); + + expect(dialog.text()).not.toContain("must be greater than or equal to 0"); + }); + + it("accepts positive numbers", async () => { + const exportBtn = wrapper.find('[data-test="namespaces-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const numberInput = dialog.find('[data-test="number-of-devices-input"] input'); + await numberInput.setValue("10"); + await flushPromises(); + + expect(dialog.text()).not.toContain("must be greater than or equal to 0"); + }); + + it("disables export button when validation fails", async () => { + const exportBtn = wrapper.find('[data-test="namespaces-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const numberInput = dialog.find('[data-test="number-of-devices-input"] input'); + await numberInput.setValue("-1"); + await flushPromises(); + + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + expect(confirmBtn.attributes("disabled")).toBeDefined(); + }); + }); + + describe("exporting namespaces", () => { + it("exports with 'more than' filter", async () => { + mountWrapper(); + const exportBtn = wrapper.find('[data-test="namespaces-export-btn"]'); + 
await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const numberInput = dialog.find('[data-test="number-of-devices-input"] input'); + await numberInput.setValue("5"); + await flushPromises(); + + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + expect(namespacesStore.exportNamespacesToCsv).toHaveBeenCalledWith(expect.any(String)); + expect(saveAs).toHaveBeenCalledWith( + expect.any(Blob), + "namespaces_more_than_5_devices.csv", + ); + expect(mockSnackbar.showSuccess).toHaveBeenCalledWith("Namespaces exported successfully."); + }); + + it("exports with 'no devices' filter", async () => { + mountWrapper(); + const exportBtn = wrapper.find('[data-test="namespaces-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const noDevicesRadio = dialog.find('[data-test="radio-no-devices"] input'); + await noDevicesRadio.setValue(true); + await flushPromises(); + + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + expect(namespacesStore.exportNamespacesToCsv).toHaveBeenCalled(); + expect(saveAs).toHaveBeenCalledWith( + expect.any(Blob), + "namespaces_no_devices.csv", + ); + expect(mockSnackbar.showSuccess).toHaveBeenCalledWith("Namespaces exported successfully."); + }); + + it("exports with 'no sessions' filter", async () => { + mountWrapper(); + const exportBtn = wrapper.find('[data-test="namespaces-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const noSessionsRadio = dialog.find('[data-test="radio-no-sessions"] input'); + await noSessionsRadio.setValue(true); + await flushPromises(); + + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + 
expect(namespacesStore.exportNamespacesToCsv).toHaveBeenCalled(); + expect(saveAs).toHaveBeenCalledWith( + expect.any(Blob), + "namespaces_with_devices_but_no_sessions.csv", + ); + expect(mockSnackbar.showSuccess).toHaveBeenCalledWith("Namespaces exported successfully."); + }); + + it("closes dialog after successful export", async () => { + mountWrapper(); + const exportBtn = wrapper.find('[data-test="namespaces-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + const dialogContent = getDialog().find(".v-overlay__content"); + expect(dialogContent.attributes("style")).toContain("display: none;"); + }); + }); + + describe("error handling", () => { + it("shows error snackbar when export fails", async () => { + mountWrapper(); + vi.mocked(namespacesStore.exportNamespacesToCsv).mockRejectedValueOnce( + createAxiosError(500, "Internal Server Error"), + ); + + const exportBtn = wrapper.find('[data-test="namespaces-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + expect(mockSnackbar.showError).toHaveBeenCalledWith("Error exporting namespaces."); + expect(saveAs).not.toHaveBeenCalled(); + }); + + it("keeps dialog open when export fails", async () => { + mountWrapper(); + vi.mocked(namespacesStore.exportNamespacesToCsv).mockRejectedValueOnce( + createAxiosError(500, "Internal Server Error"), + ); + + const exportBtn = wrapper.find('[data-test="namespaces-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + 
expect(getDialog().exists()).toBe(true); + }); + }); + + describe("closing dialog", () => { + beforeEach(() => mountWrapper()); + + it("closes dialog when cancel button is clicked", async () => { + const exportBtn = wrapper.find('[data-test="namespaces-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const cancelBtn = dialog.find('[data-test="cancel-btn"]'); + await cancelBtn.trigger("click"); + await flushPromises(); + + const dialogContent = getDialog().find(".v-overlay__content"); + expect(dialogContent.attributes("style")).toContain("display: none;"); + }); + + it("resets form when dialog is closed and reopened", async () => { + const exportBtn = wrapper.find('[data-test="namespaces-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + let dialog = getDialog(); + const numberInput = dialog.find('[data-test="number-of-devices-input"] input'); + await numberInput.setValue("42"); + await flushPromises(); + + const cancelBtn = dialog.find('[data-test="cancel-btn"]'); + await cancelBtn.trigger("click"); + await flushPromises(); + + // Reopen dialog + await exportBtn.trigger("click"); + await flushPromises(); + + dialog = getDialog(); + const numberInputReopened = dialog.find('[data-test="number-of-devices-input"] input'); + expect((numberInputReopened.element as HTMLInputElement).value).toBe("0"); + }); + + it("resets filter selection when dialog is closed and reopened", async () => { + const exportBtn = wrapper.find('[data-test="namespaces-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + let dialog = getDialog(); + const noDevicesRadio = dialog.find('[data-test="radio-no-devices"] input'); + await noDevicesRadio.setValue(true); + await flushPromises(); + + const cancelBtn = dialog.find('[data-test="cancel-btn"]'); + await cancelBtn.trigger("click"); + await flushPromises(); + + // Reopen dialog + await exportBtn.trigger("click"); + await 
flushPromises(); + + dialog = getDialog(); + const moreThanRadio = dialog.find('[data-test="radio-more-than"] input'); + expect((moreThanRadio.element as HTMLInputElement).checked).toBe(true); + }); + }); +}); diff --git a/ui/admin/tests/unit/components/Namespaces/NamespaceList.spec.ts b/ui/admin/tests/unit/components/Namespaces/NamespaceList.spec.ts new file mode 100644 index 00000000000..9e6cdf2a55c --- /dev/null +++ b/ui/admin/tests/unit/components/Namespaces/NamespaceList.spec.ts @@ -0,0 +1,199 @@ +import { describe, expect, it, beforeEach, vi, afterEach } from "vitest"; +import { Router } from "vue-router"; +import { VueWrapper, flushPromises } from "@vue/test-utils"; +import { mountComponent, mockSnackbar } from "@tests/utils/mount"; +import { createCleanAdminRouter } from "@tests/utils/router"; +import { createAxiosError } from "@tests/utils/axiosError"; +import useNamespacesStore from "@admin/store/modules/namespaces"; +import NamespaceList from "@admin/components/Namespace/NamespaceList.vue"; +import { mockNamespaces } from "../../mocks"; + +describe("NamespaceList", () => { + let wrapper: VueWrapper>; + let router: Router; + let namespacesStore: ReturnType; + + const mountWrapper = (mockNamespaceCount?: number) => { + router = createCleanAdminRouter(); + + wrapper = mountComponent(NamespaceList, { + global: { plugins: [router] }, + piniaOptions: { + initialState: { + adminNamespaces: { + namespaces: mockNamespaces, + namespaceCount: mockNamespaceCount ?? 
mockNamespaces.length, + }, + }, + }, + }); + + namespacesStore = useNamespacesStore(); + }; + + afterEach(() => { + vi.clearAllMocks(); + wrapper?.unmount(); + }); + + describe("rendering", () => { + beforeEach(() => mountWrapper()); + + it("renders the data table", () => { + expect(wrapper.find('[data-test="namespaces-list"]').exists()).toBe(true); + }); + + it("displays namespace names", () => { + expect(wrapper.text()).toContain(mockNamespaces[0].name); + expect(wrapper.text()).toContain(mockNamespaces[1].name); + }); + + it("displays namespace tenant IDs", () => { + expect(wrapper.text()).toContain(mockNamespaces[0].tenant_id); + expect(wrapper.text()).toContain(mockNamespaces[1].tenant_id); + }); + + it("displays owner links", () => { + const ownerLinks = wrapper.findAll('[data-test="owner-link"]'); + expect(ownerLinks).toHaveLength(mockNamespaces.length); + }); + + it("displays device counts", () => { + const firstNamespaceCount = mockNamespaces[0].devices_accepted_count + + mockNamespaces[0].devices_pending_count + + mockNamespaces[0].devices_rejected_count; + + expect(wrapper.text()).toContain(firstNamespaceCount.toString()); + }); + + it("displays info buttons for each namespace", () => { + const infoButtons = wrapper.findAll('[data-test="info-button"]'); + expect(infoButtons).toHaveLength(mockNamespaces.length); + }); + + it("displays edit buttons for each namespace", () => { + const editButtons = wrapper.findAll('[data-test="namespace-edit-dialog-btn"]'); + expect(editButtons).toHaveLength(mockNamespaces.length); + }); + + it("displays delete buttons for each namespace", () => { + const deleteButtons = wrapper.findAll('[data-test="namespace-delete-dialog-btn"]'); + expect(deleteButtons).toHaveLength(mockNamespaces.length); + }); + }); + + describe("fetching namespaces", () => { + it("fetches namespaces on mount", () => { + mountWrapper(); + + expect(namespacesStore.fetchNamespaceList).toHaveBeenCalledWith( + expect.objectContaining({ + perPage: 10, + 
page: 1, + }), + ); + }); + + it("refetches namespaces when page changes", async () => { + mountWrapper(11); // Mock total count to 11 to enable pagination + + // Click next page button + const nextPageBtn = wrapper.find('[data-test="pager-next"]'); + await nextPageBtn.trigger("click"); + await flushPromises(); + + expect(namespacesStore.fetchNamespaceList).toHaveBeenCalledWith( + expect.objectContaining({ + page: 2, + }), + ); + }); + + it("refetches namespaces when items per page changes", async () => { + mountWrapper(20); + + // Change items per page via combobox + const ippCombo = wrapper.find('[data-test="ipp-combo"] input'); + await ippCombo.setValue(20); + await flushPromises(); + + expect(namespacesStore.fetchNamespaceList).toHaveBeenCalledWith( + expect.objectContaining({ + perPage: 20, + }), + ); + }); + }); + + describe("navigating to namespace details", () => { + it("navigates when clicking info button", async () => { + mountWrapper(); + + const pushSpy = vi.spyOn(router, "push"); + const infoButton = wrapper.findAll('[data-test="info-button"]')[0]; + + await infoButton.trigger("click"); + + expect(pushSpy).toHaveBeenCalledWith({ + name: "namespaceDetails", + params: { id: mockNamespaces[0].tenant_id }, + }); + }); + }); + + describe("navigating to user details", () => { + it("navigates when clicking owner link", async () => { + mountWrapper(); + + const pushSpy = vi.spyOn(router, "push"); + const ownerLink = wrapper.findAll('[data-test="owner-link"]')[0]; + + await ownerLink.trigger("click"); + + expect(pushSpy).toHaveBeenCalledWith( + expect.objectContaining({ + name: "userDetails", + params: { id: mockNamespaces[0].owner }, + }), + ); + }); + }); + + describe("opening dialogs", () => { + beforeEach(() => mountWrapper()); + + it("opens edit dialog when clicking edit button", async () => { + const editButton = wrapper.findAll('[data-test="namespace-edit-dialog-btn"]')[0]; + await editButton.trigger("click"); + await flushPromises(); + + // 
NamespaceEdit component should be rendered with modelValue true + expect(wrapper.findComponent({ name: "NamespaceEdit" }).exists()).toBe(true); + }); + + it("opens delete dialog when clicking delete button", async () => { + const deleteButton = wrapper.findAll('[data-test="namespace-delete-dialog-btn"]')[0]; + await deleteButton.trigger("click"); + await flushPromises(); + + // NamespaceDelete component should be rendered with modelValue true + expect(wrapper.findComponent({ name: "NamespaceDelete" }).exists()).toBe(true); + }); + }); + + describe("error handling", () => { + it("shows error snackbar when fetching namespaces fails", async () => { + mountWrapper(11); + vi.mocked(namespacesStore.fetchNamespaceList).mockRejectedValueOnce( + createAxiosError(500, "Internal Server Error"), + ); + + // Trigger refetch by changing page + const nextPageBtn = wrapper.find('[data-test="pager-next"]'); + await nextPageBtn.trigger("click"); + await flushPromises(); + + expect(mockSnackbar.showError).toHaveBeenCalledWith("Failed to fetch namespaces."); + }); + }); +}); diff --git a/ui/admin/tests/unit/components/Session/SessionList.spec.ts b/ui/admin/tests/unit/components/Session/SessionList.spec.ts new file mode 100644 index 00000000000..d656e3fd23b --- /dev/null +++ b/ui/admin/tests/unit/components/Session/SessionList.spec.ts @@ -0,0 +1,182 @@ +import { describe, expect, it, vi, afterEach, beforeEach } from "vitest"; +import { VueWrapper, flushPromises } from "@vue/test-utils"; +import { mountComponent, mockSnackbar } from "@tests/utils/mount"; +import { createCleanAdminRouter } from "@tests/utils/router"; +import { createAxiosError } from "@tests/utils/axiosError"; +import useSessionsStore from "@admin/store/modules/sessions"; +import SessionList from "@admin/components/Sessions/SessionList.vue"; +import { mockSessions } from "../../mocks"; + +describe("SessionList", () => { + let wrapper: VueWrapper>; + let sessionsStore: ReturnType; + let router: ReturnType; + + const 
mountWrapper = async (sessionCount = mockSessions.length) => { + router = createCleanAdminRouter(); + wrapper = mountComponent(SessionList, { + global: { plugins: [router] }, + piniaOptions: { + initialState: { + adminSessions: { + sessions: sessionCount > 0 ? mockSessions : [], + sessionCount, + }, + }, + }, + }); + + sessionsStore = useSessionsStore(); + + await flushPromises(); + }; + + afterEach(() => { + vi.clearAllMocks(); + wrapper?.unmount(); + }); + + describe("rendering", () => { + beforeEach(() => mountWrapper()); + + it("renders session list table", () => { + const table = wrapper.find('[data-test="session-list"]'); + expect(table.exists()).toBe(true); + }); + + it("displays all sessions", () => { + const rows = wrapper.findAll('[data-test="tbody-has-items"] tr'); + expect(rows).toHaveLength(mockSessions.length); + }); + + it("displays session uid", () => { + const firstRow = wrapper.findAll('[data-test="tbody-has-items"] tr')[0]; + expect(firstRow.text()).toContain("session-1"); + }); + + it("displays device name", () => { + const firstRow = wrapper.findAll('[data-test="tbody-has-items"] tr')[0]; + expect(firstRow.text()).toContain("test-device"); + }); + + it("displays username", () => { + const firstRow = wrapper.findAll('[data-test="tbody-has-items"] tr')[0]; + expect(firstRow.text()).toContain("alice"); + }); + + it("displays IP address", () => { + const firstRow = wrapper.findAll('[data-test="tbody-has-items"] tr')[0]; + expect(firstRow.text()).toContain("192.168.1.100"); + }); + + it("shows active status icon for active sessions", () => { + const firstRow = wrapper.findAll('[data-test="tbody-has-items"] tr')[0]; + const activeIcon = firstRow.find(".mdi-check-circle"); + expect(activeIcon.exists()).toBe(true); + }); + + it("shows authenticated icon for authenticated sessions", () => { + const firstRow = wrapper.findAll('[data-test="tbody-has-items"] tr')[0]; + const authIcon = firstRow.find(".mdi-shield-check"); + 
expect(authIcon.exists()).toBe(true); + }); + + it("shows not authenticated icon for unauthenticated sessions", () => { + const secondRow = wrapper.findAll('[data-test="tbody-has-items"] tr')[1]; + const notAuthIcon = secondRow.find(".mdi-shield-alert"); + expect(notAuthIcon.exists()).toBe(true); + }); + + it("shows empty state when no sessions", async () => { + wrapper.unmount(); + await mountWrapper(0); + await flushPromises(); + + const emptyState = wrapper.find('[data-test="sessions-empty-state"]'); + expect(emptyState.exists()).toBe(true); + }); + }); + + describe("initial data loading", () => { + it("calls fetchSessionList on mount", async () => { + await mountWrapper(); + expect(sessionsStore.fetchSessionList).toHaveBeenCalledWith({ + perPage: 10, + page: 1, + }); + }); + }); + + describe("pagination", () => { + it("changes page when next button is clicked", async () => { + await mountWrapper(11); + + const nextBtn = wrapper.find('[data-test="pager-next"]'); + await nextBtn.trigger("click"); + await flushPromises(); + + expect(sessionsStore.fetchSessionList).toHaveBeenCalledWith({ + perPage: 10, + page: 2, + }); + }); + + it("changes items per page when selecting from dropdown", async () => { + await mountWrapper(20); + + // Select 20 items per page option (index 1) + const ippCombo = wrapper.find('[data-test="ipp-combo"] input'); + await ippCombo.setValue(20); + await flushPromises(); + + expect(sessionsStore.fetchSessionList).toHaveBeenCalledWith({ + perPage: 20, + page: 1, + }); + }); + }); + + describe("navigation", () => { + beforeEach(() => mountWrapper()); + + it("redirects to device details when clicking device name", async () => { + const pushSpy = vi.spyOn(router, "push"); + + const deviceLinks = wrapper.findAll('[data-test="device-link"]'); + await deviceLinks[0].trigger("click"); + await flushPromises(); + + expect(pushSpy).toHaveBeenCalledWith({ + name: "deviceDetails", + params: { id: "device-123" }, + }); + }); + + it("redirects to session 
details when clicking info icon", async () => { + const pushSpy = vi.spyOn(router, "push"); + + const infoButtons = wrapper.findAll('[data-test="info-button"]'); + await infoButtons[0].trigger("click"); + await flushPromises(); + + expect(pushSpy).toHaveBeenCalledWith({ + name: "sessionDetails", + params: { id: "session-1" }, + }); + }); + }); + + describe("error handling", () => { + it("shows error snackbar when fetching sessions fails", async () => { + mountComponent(SessionList, { global: { plugins: [createCleanAdminRouter()] } }); + + vi.mocked(useSessionsStore().fetchSessionList).mockRejectedValueOnce( + createAxiosError(500, "Network Error"), + ); + + await flushPromises(); + + expect(mockSnackbar.showError).toHaveBeenCalledWith("Failed to fetch sessions list."); + }); + }); +}); diff --git a/ui/admin/tests/unit/components/Settings/SettingsAuthentication.spec.ts b/ui/admin/tests/unit/components/Settings/SettingsAuthentication.spec.ts new file mode 100644 index 00000000000..ad93f13e4f4 --- /dev/null +++ b/ui/admin/tests/unit/components/Settings/SettingsAuthentication.spec.ts @@ -0,0 +1,283 @@ +import { describe, expect, it, vi, afterEach, beforeEach } from "vitest"; +import { DOMWrapper, VueWrapper, flushPromises } from "@vue/test-utils"; +import { mountComponent, mockSnackbar } from "@tests/utils/mount"; +import { createAxiosError } from "@tests/utils/axiosError"; +import useInstanceStore from "@admin/store/modules/instance"; +import SettingsAuthentication from "@admin/components/Settings/SettingsAuthentication.vue"; +import { mockAuthSettings, mockAuthSettingsLocalOnly } from "../../mocks"; + +// Mock window.open +const mockWindowOpen = vi.fn(); +Object.defineProperty(window, "open", { + writable: true, + value: mockWindowOpen, +}); + +// Mock URL methods +const mockCreateObjectURL = vi.fn(() => "blob:mock-url"); +const mockRevokeObjectURL = vi.fn(); +global.URL.createObjectURL = mockCreateObjectURL; +global.URL.revokeObjectURL = mockRevokeObjectURL; + +// 
Mock document.createElement for download +const mockClick = vi.fn(); +const mockCreateElement = document.createElement.bind(document); +document.createElement = vi.fn((tagName: string) => { + const element = mockCreateElement(tagName); + if (tagName === "a") element.click = mockClick; + return element; +}); + +describe("SettingsAuthentication", () => { + let wrapper: VueWrapper<InstanceType<typeof SettingsAuthentication>>; + let instanceStore: ReturnType<typeof useInstanceStore>; + + const mountWrapper = async (authSettings = mockAuthSettings) => { + wrapper = mountComponent(SettingsAuthentication, { + piniaOptions: { + initialState: { + adminInstance: { authenticationSettings: authSettings }, + }, + }, + }); + + instanceStore = useInstanceStore(); + await flushPromises(); + }; + + afterEach(() => { + vi.clearAllMocks(); + wrapper?.unmount(); + }); + + describe("rendering", () => { + beforeEach(() => mountWrapper()); + + it("renders page header", () => { + const header = wrapper.find('[title-test-id="auth-header"]'); + expect(header.exists()).toBe(true); + expect(header.text()).toContain("Authentication"); + }); + + it("renders authentication status section", () => { + const statusHeader = wrapper.find('[data-test="auth-status-header"]'); + expect(statusHeader.exists()).toBe(true); + expect(statusHeader.text()).toBe("Authentication Status"); + }); + + it("renders local authentication switch", () => { + const localAuthSwitch = wrapper.find('[data-test="local-auth-switch"]'); + expect(localAuthSwitch.exists()).toBe(true); + }); + + it("renders SAML authentication switch", () => { + const samlAuthSwitch = wrapper.find('[data-test="saml-auth-switch"]'); + expect(samlAuthSwitch.exists()).toBe(true); + }); + + it("shows SSO section when SAML is enabled", () => { + const ssoHeader = wrapper.find('[data-test="sso-header"]'); + expect(ssoHeader.exists()).toBe(true); + expect(ssoHeader.text()).toBe("Single Sign-on (SSO)"); + }); + + it("displays IdP SignOn POST URL", () => { + const postUrl = 
wrapper.find('[data-test="idp-signon-post-value"]'); + expect(postUrl.exists()).toBe(true); + expect(postUrl.text()).toBe(mockAuthSettings.saml.idp.binding.post); + }); + + it("displays IdP SignOn Redirect URL", () => { + const redirectUrl = wrapper.find('[data-test="idp-signon-redirect-value"]'); + expect(redirectUrl.exists()).toBe(true); + expect(redirectUrl.text()).toBe(mockAuthSettings.saml.idp.binding.redirect); + }); + + it("displays IdP Entity ID", () => { + const entityId = wrapper.find('[data-test="idp-entity-value"]'); + expect(entityId.exists()).toBe(true); + expect(entityId.text()).toBe(mockAuthSettings.saml.idp.entity_id); + }); + + it("displays SP certificate download button when certificate exists", () => { + const downloadBtn = wrapper.find('[data-test="download-certificate-btn"]'); + expect(downloadBtn.exists()).toBe(true); + }); + + it("hides SSO section when SAML is disabled", async () => { + wrapper.unmount(); + await mountWrapper(mockAuthSettingsLocalOnly); + + const ssoHeader = wrapper.find('[data-test="sso-header"]'); + expect(ssoHeader.exists()).toBe(false); + }); + }); + + describe("initial data loading", () => { + it("fetches authentication settings on mount", async () => { + await mountWrapper(); + expect(instanceStore.fetchAuthenticationSettings).toHaveBeenCalled(); + }); + }); + + describe("local authentication toggle", () => { + beforeEach(() => mountWrapper()); + + it("calls updateLocalAuthentication when toggling switch", async () => { + const localAuthSwitch = wrapper.find('[data-test="local-auth-switch"] input'); + await localAuthSwitch.trigger("click"); + await flushPromises(); + + expect(instanceStore.updateLocalAuthentication).toHaveBeenCalledWith(false); + }); + + it("shows error when trying to disable all authentication methods", async () => { + vi.mocked(instanceStore.updateLocalAuthentication).mockRejectedValueOnce( + createAxiosError(400, "Bad Request"), + ); + + const localAuthSwitch = 
wrapper.find('[data-test="local-auth-switch"] input'); + await localAuthSwitch.trigger("click"); + await flushPromises(); + + expect(mockSnackbar.showError).toHaveBeenCalledWith( + "You cannot disable all authentication methods.", + ); + }); + + it("shows generic error for other failures", async () => { + vi.mocked(instanceStore.updateLocalAuthentication).mockRejectedValueOnce( + createAxiosError(500, "Internal Server Error"), + ); + + const localAuthSwitch = wrapper.find('[data-test="local-auth-switch"] input'); + await localAuthSwitch.trigger("click"); + await flushPromises(); + + expect(mockSnackbar.showError).toHaveBeenCalledWith( + "An error occurred while updating local authentication.", + ); + }); + }); + + describe("SAML authentication toggle", () => { + beforeEach(() => mountWrapper()); + + it("disables SAML when toggling off", async () => { + const samlAuthSwitch = wrapper.find('[data-test="saml-auth-switch"] input'); + await samlAuthSwitch.trigger("click"); + await flushPromises(); + + expect(instanceStore.updateSamlAuthentication).toHaveBeenCalledWith({ + enable: false, + idp: { + entity_id: "", + binding: { post: "", redirect: "" }, + certificate: "", + }, + sp: { sign_requests: false }, + }); + }); + + it("opens SSO dialog when enabling SAML", async () => { + wrapper.unmount(); + await mountWrapper(mockAuthSettingsLocalOnly); + + const samlAuthSwitch = wrapper.find('[data-test="saml-auth-switch"] input'); + await samlAuthSwitch.trigger("click"); + await flushPromises(); + + const dialog = new DOMWrapper(document.body).find('[data-test="configure-sso-dialog"]'); + expect(dialog.exists()).toBe(true); + }); + + it("shows error when trying to disable all authentication methods", async () => { + vi.mocked(instanceStore.updateSamlAuthentication).mockRejectedValueOnce( + createAxiosError(400, "Bad Request"), + ); + + const samlAuthSwitch = wrapper.find('[data-test="saml-auth-switch"] input'); + await samlAuthSwitch.trigger("click"); + await flushPromises(); 
+ + expect(mockSnackbar.showError).toHaveBeenCalledWith( + "You cannot disable all authentication methods.", + ); + }); + + it("shows generic error for other failures", async () => { + vi.mocked(instanceStore.updateSamlAuthentication).mockRejectedValueOnce( + createAxiosError(500, "Internal Server Error"), + ); + + const samlAuthSwitch = wrapper.find('[data-test="saml-auth-switch"] input'); + await samlAuthSwitch.trigger("click"); + await flushPromises(); + + expect(mockSnackbar.showError).toHaveBeenCalledWith( + "An error occurred while updating local authentication.", + ); + }); + }); + + describe("SSO configuration", () => { + beforeEach(() => mountWrapper()); + + it("opens SSO dialog when clicking configure button", async () => { + const configBtn = wrapper.find('[data-test="sso-config-btn"]'); + await configBtn.trigger("click"); + await flushPromises(); + + const dialog = new DOMWrapper(document.body).find('[data-test="configure-sso-dialog"]'); + expect(dialog.exists()).toBe(true); + }); + + it("shows 'Edit' text when SAML is enabled", () => { + const configBtn = wrapper.find('[data-test="sso-config-btn"]'); + expect(configBtn.text()).toBe("Edit"); + }); + }); + + describe("SP certificate download", () => { + beforeEach(() => mountWrapper()); + + it("downloads certificate when clicking download button", async () => { + const downloadBtn = wrapper.find('[data-test="download-certificate-btn"]'); + await downloadBtn.trigger("click"); + await flushPromises(); + + expect(mockCreateObjectURL).toHaveBeenCalled(); + expect(mockClick).toHaveBeenCalled(); + expect(mockRevokeObjectURL).toHaveBeenCalledWith("blob:mock-url"); + }); + + it("shows error when no certificate is available", async () => { + wrapper.unmount(); + const settingsWithoutCert = { + ...mockAuthSettings, + saml: { + ...mockAuthSettings.saml, + sp: { + ...mockAuthSettings.saml.sp, + certificate: "", + }, + }, + }; + await mountWrapper(settingsWithoutCert); + + const downloadBtn = 
wrapper.find('[data-test="download-certificate-btn"]'); + expect(downloadBtn.exists()).toBe(false); + }); + }); + + describe("authentication URL redirect", () => { + it("opens authentication URL in new tab when clicking test button", async () => { + await mountWrapper(); + const redirectBtn = wrapper.find('[data-test="redirect-auth-btn"]'); + await redirectBtn.trigger("click"); + await flushPromises(); + + expect(mockWindowOpen).toHaveBeenCalledWith(mockAuthSettings.saml.auth_url, "_blank"); + }); + }); +}); diff --git a/ui/admin/tests/unit/components/Settings/SettingsLicense.spec.ts b/ui/admin/tests/unit/components/Settings/SettingsLicense.spec.ts new file mode 100644 index 00000000000..489b1703703 --- /dev/null +++ b/ui/admin/tests/unit/components/Settings/SettingsLicense.spec.ts @@ -0,0 +1,260 @@ +import { describe, expect, it, vi, afterEach, beforeEach } from "vitest"; +import { VueWrapper, flushPromises } from "@vue/test-utils"; +import { mountComponent, mockSnackbar } from "@tests/utils/mount"; +import { createAxiosError } from "@tests/utils/axiosError"; +import useLicenseStore from "@admin/store/modules/license"; +import SettingsLicense from "@admin/components/Settings/SettingsLicense.vue"; +import { + mockLicense, + mockLicenseExpired, + mockLicenseAboutToExpire, + mockLicenseGracePeriod, + mockLicenseRegional, + mockNoLicense, +} from "../../mocks"; +import { IAdminLicense } from "@admin/interfaces/ILicense"; +import * as licenseApi from "@admin/store/api/license"; + +vi.mock("@admin/store/api/license"); + +describe("SettingsLicense", () => { + let wrapper: VueWrapper<InstanceType<typeof SettingsLicense>>; + let licenseStore: ReturnType<typeof useLicenseStore>; + + const mountWrapper = async (license: Partial<IAdminLicense> = mockLicense) => { + wrapper = mountComponent(SettingsLicense, { piniaOptions: { initialState: { adminLicense: { license } } } }); + + licenseStore = useLicenseStore(); + + await flushPromises(); + }; + + afterEach(() => { + vi.clearAllMocks(); + wrapper?.unmount(); + }); + + describe("rendering", () => { + 
beforeEach(() => mountWrapper()); + + it("renders page header", () => { + const header = wrapper.find('[title-test-id="license-header"]'); + expect(header.exists()).toBe(true); + expect(header.text()).toContain("License Details"); + }); + + it("renders license card", () => { + const card = wrapper.find('[data-test="license-card"]'); + expect(card.exists()).toBe(true); + }); + + it("displays issued at date", () => { + const issuedAt = wrapper.find('[data-test="issued-at-field"]'); + expect(issuedAt.exists()).toBe(true); + }); + + it("displays starts at date", () => { + const startsAt = wrapper.find('[data-test="starts-at-field"]'); + expect(startsAt.exists()).toBe(true); + }); + + it("displays expires at date", () => { + const expiresAt = wrapper.find('[data-test="expires-at-field"]'); + expect(expiresAt.exists()).toBe(true); + }); + + it("displays global license badge when no regions specified", () => { + const globalBadge = wrapper.text(); + expect(globalBadge).toContain("Global"); + }); + + it("displays regional license badge with regions", async () => { + wrapper.unmount(); + await mountWrapper(mockLicenseRegional); + + const badge = wrapper.text(); + expect(badge).toContain("Limited"); + expect(badge).toContain("US, EU"); + }); + + it("displays customer id", () => { + const customerId = wrapper.find('[data-test="id"]'); + expect(customerId.exists()).toBe(true); + }); + + it("displays customer name", () => { + const customerName = wrapper.find('[data-test="name"]'); + expect(customerName.exists()).toBe(true); + }); + + it("displays customer email", () => { + const customerEmail = wrapper.find('[data-test="email"]'); + expect(customerEmail.exists()).toBe(true); + }); + + it("displays customer company", () => { + const customerCompany = wrapper.find('[data-test="company"]'); + expect(customerCompany.exists()).toBe(true); + }); + + it("displays devices feature", () => { + const devices = wrapper.find('[data-test="devices"]'); + expect(devices.exists()).toBe(true); 
+ }); + + it("displays session_recording feature", () => { + const sessionRecording = wrapper.find('[data-test="session_recording"]'); + expect(sessionRecording.exists()).toBe(true); + }); + + it("displays firewall_rules feature", () => { + const firewallRules = wrapper.find('[data-test="firewall_rules"]'); + expect(firewallRules.exists()).toBe(true); + }); + + it("shows included icon for enabled boolean features", () => { + const includedIcons = wrapper.findAll('[data-test="included-icon"]'); + expect(includedIcons.length).toBeGreaterThan(0); + }); + + it("shows not included icon for disabled boolean features", () => { + const notIncludedIcons = wrapper.findAll('[data-test="not-included-icon"]'); + expect(notIncludedIcons.length).toBeGreaterThan(0); + }); + }); + + describe("license alerts", () => { + it("shows no license alert when license not installed", async () => { + await mountWrapper(mockNoLicense); + + const alert = wrapper.find('[data-test="license-alert"]'); + expect(alert.exists()).toBe(true); + expect(alert.text()).toContain("You do not have an installed license"); + }); + + it("shows about to expire alert", async () => { + await mountWrapper(mockLicenseAboutToExpire); + + const alert = wrapper.find('[data-test="license-alert"]'); + expect(alert.exists()).toBe(true); + expect(alert.text()).toContain("Your license is about to expire"); + }); + + it("shows grace period alert when expired in grace period", async () => { + await mountWrapper(mockLicenseGracePeriod); + + const alert = wrapper.find('[data-test="license-alert"]'); + expect(alert.exists()).toBe(true); + expect(alert.text()).toContain("expired, but you are still within the grace period"); + }); + + it("shows expired alert when license expired", async () => { + await mountWrapper(mockLicenseExpired); + + const alert = wrapper.find('[data-test="license-alert"]'); + expect(alert.exists()).toBe(true); + expect(alert.text()).toContain("Your license has expired!"); + }); + + it("shows no alert when 
license is valid", async () => { + await mountWrapper(mockLicense); + + const alert = wrapper.find('[data-test="license-alert"]'); + expect(alert.exists()).toBe(false); + }); + }); + + describe("initial data loading", () => { + it("fetches license on mount", async () => { + await mountWrapper(); + expect(licenseStore.getLicense).toHaveBeenCalled(); + }); + + it("shows error when license fetch fails", async () => { + vi.mocked(licenseApi.getLicense).mockRejectedValue( + createAxiosError(500, "Internal Server Error"), + ); + mountComponent(SettingsLicense, { piniaOptions: { stubActions: false } }); + await flushPromises(); + + expect(mockSnackbar.showError).toHaveBeenCalledWith("Error loading license."); + }); + }); + + describe("file upload", () => { + beforeEach(() => mountWrapper()); + + it("renders file input", () => { + const fileInput = wrapper.find('input[type="file"]'); + expect(fileInput.exists()).toBe(true); + }); + + it("accepts only .dat files", () => { + const fileInput = wrapper.find('input[type="file"]'); + expect(fileInput.attributes("accept")).toBe(".dat"); + }); + + it("has upload button disabled by default", () => { + const uploadBtn = wrapper.find('[data-test="upload-license-btn"]'); + expect(uploadBtn.attributes("disabled")).toBeDefined(); + }); + + it("uploads license file successfully", async () => { + const file = new File(["license content"], "license.dat", { type: "application/octet-stream" }); + const fileInput = wrapper.find('input[type="file"]'); + + Object.defineProperty(fileInput.element, "files", { + value: [file], + writable: false, + }); + await fileInput.trigger("change"); + await flushPromises(); + + const uploadBtn = wrapper.find('[data-test="upload-license-btn"]'); + await uploadBtn.trigger("click"); + await flushPromises(); + + expect(licenseStore.uploadLicense).toHaveBeenCalledWith(file); + expect(licenseStore.getLicense).toHaveBeenCalled(); + expect(mockSnackbar.showSuccess).toHaveBeenCalledWith("License uploaded 
successfully."); + }); + + it("shows error when upload fails", async () => { + vi.mocked(licenseStore.uploadLicense).mockRejectedValueOnce( + createAxiosError(500, "Internal Server Error"), + ); + + const file = new File(["license content"], "license.dat", { type: "application/octet-stream" }); + const fileInput = wrapper.find('input[type="file"]'); + + Object.defineProperty(fileInput.element, "files", { + value: [file], + writable: false, + }); + await fileInput.trigger("change"); + await flushPromises(); + + const uploadBtn = wrapper.find('[data-test="upload-license-btn"]'); + await uploadBtn.trigger("click"); + await flushPromises(); + + expect(mockSnackbar.showError).toHaveBeenCalledWith("Failed to upload the license."); + }); + }); + + describe("conditional rendering", () => { + it("hides license details when no license installed", async () => { + await mountWrapper(mockNoLicense); + + const issuedAt = wrapper.find('[data-test="issued-at-field"]'); + expect(issuedAt.exists()).toBe(false); + }); + + it("shows license details when license installed", async () => { + await mountWrapper(mockLicense); + + const issuedAt = wrapper.find('[data-test="issued-at-field"]'); + expect(issuedAt.exists()).toBe(true); + }); + }); +}); diff --git a/ui/admin/tests/unit/components/User/UserDelete.spec.ts b/ui/admin/tests/unit/components/User/UserDelete.spec.ts new file mode 100644 index 00000000000..614cf2417d6 --- /dev/null +++ b/ui/admin/tests/unit/components/User/UserDelete.spec.ts @@ -0,0 +1,164 @@ +import { describe, expect, it, beforeEach, vi, afterEach } from "vitest"; +import { DOMWrapper, VueWrapper, flushPromises } from "@vue/test-utils"; +import { mountComponent, mockSnackbar } from "@tests/utils/mount"; +import { createCleanAdminRouter } from "@tests/utils/router"; +import { createAxiosError } from "@tests/utils/axiosError"; +import useUsersStore from "@admin/store/modules/users"; +import UserDelete from "@admin/components/User/UserDelete.vue"; +import { Router } 
from "vue-router"; + +const triggerButtonTemplate = ` + <button data-test="trigger-button">Delete</button> + `; + +describe("UserDelete", () => { + let wrapper: VueWrapper<InstanceType<typeof UserDelete>>; + let usersStore: ReturnType<typeof useUsersStore>; + let router: Router; + const mockUserId = "user-123"; + + const mountWrapper = (props: { redirect?: boolean; showTooltip?: boolean } = {}) => { + router = createCleanAdminRouter(); + + wrapper = mountComponent(UserDelete, { + global: { plugins: [router] }, + props: { + id: mockUserId, + ...props, + }, + slots: { default: triggerButtonTemplate }, + attachTo: document.body, + }); + + usersStore = useUsersStore(); + }; + + const openDialog = async () => { + await wrapper.find('[data-test="trigger-button"]').trigger("click"); + return new DOMWrapper(document.body).find('[role="dialog"]'); + }; + + afterEach(() => { + vi.clearAllMocks(); + wrapper?.unmount(); + document.body.innerHTML = ""; + }); + + describe("rendering", () => { + beforeEach(() => mountWrapper()); + + it("renders the slot content", () => { + const trigger = wrapper.find('[data-test="trigger-button"]'); + expect(trigger.exists()).toBe(true); + expect(trigger.text()).toBe("Delete"); + }); + + it("does not show the dialog initially", () => { + expect(new DOMWrapper(document.body).find('[role="dialog"]').exists()).toBe(false); + }); + + it("shows tooltip when showTooltip prop is true", () => { + wrapper.unmount(); + mountWrapper({ showTooltip: true }); + + const tooltip = wrapper.findComponent({ name: "VTooltip" }); + expect(tooltip.exists()).toBe(true); + expect(tooltip.props("text")).toBe("Remove"); + expect(tooltip.props("disabled")).toBe(false); + }); + + it("disables tooltip when showTooltip prop is false", () => { + const tooltip = wrapper.findComponent({ name: "VTooltip" }); + expect(tooltip.props("disabled")).toBe(true); + }); + }); + + describe("opening dialog", () => { + beforeEach(() => mountWrapper()); + + it("shows dialog when clicking the trigger", async () => { + const dialog = await openDialog(); + + expect(dialog.exists()).toBe(true); + 
expect(dialog.text()).toContain("Are you sure?"); + expect(dialog.text()).toContain("You are about to remove this user"); + }); + }); + + describe("deleting user without redirect", () => { + beforeEach(() => mountWrapper({ redirect: false })); + + it("calls store actions and shows success message on confirm", async () => { + const dialog = await openDialog(); + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + expect(usersStore.deleteUser).toHaveBeenCalledWith(mockUserId); + expect(usersStore.fetchUsersList).toHaveBeenCalled(); + expect(mockSnackbar.showSuccess).toHaveBeenCalledWith("User removed successfully."); + }); + + it("emits update event after successful deletion", async () => { + const dialog = await openDialog(); + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + expect(wrapper.emitted("update")).toBeTruthy(); + }); + + it("shows error message when delete fails", async () => { + vi.mocked(usersStore.deleteUser).mockRejectedValueOnce( + createAxiosError(500, "Internal Server Error"), + ); + + const dialog = await openDialog(); + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + expect(mockSnackbar.showError).toHaveBeenCalledWith("Failed to remove the user."); + expect(wrapper.emitted("update")).toBeUndefined(); + }); + }); + + describe("deleting user with redirect", () => { + beforeEach(() => mountWrapper({ redirect: true })); + + it("redirects to users page after successful deletion", async () => { + const pushSpy = vi.spyOn(router, "push"); + + const dialog = await openDialog(); + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + expect(usersStore.deleteUser).toHaveBeenCalledWith(mockUserId); + 
expect(mockSnackbar.showSuccess).toHaveBeenCalledWith("User removed successfully."); + expect(pushSpy).toHaveBeenCalledWith("/users"); + }); + + it("does not redirect when delete fails", async () => { + vi.mocked(usersStore.deleteUser).mockRejectedValueOnce( + createAxiosError(500, "Internal Server Error"), + ); + const pushSpy = vi.spyOn(router, "push"); + + const dialog = await openDialog(); + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + expect(mockSnackbar.showError).toHaveBeenCalledWith("Failed to remove the user."); + expect(pushSpy).not.toHaveBeenCalled(); + }); + }); +}); diff --git a/ui/admin/tests/unit/components/User/UserExport.spec.ts b/ui/admin/tests/unit/components/User/UserExport.spec.ts new file mode 100644 index 00000000000..a749a9a479a --- /dev/null +++ b/ui/admin/tests/unit/components/User/UserExport.spec.ts @@ -0,0 +1,373 @@ +import { describe, expect, it, beforeEach, vi, afterEach } from "vitest"; +import { DOMWrapper, VueWrapper, flushPromises } from "@vue/test-utils"; +import { mountComponent, mockSnackbar } from "@tests/utils/mount"; +import { createAxiosError } from "@tests/utils/axiosError"; +import { saveAs } from "file-saver"; +import useUsersStore from "@admin/store/modules/users"; +import UserExport from "@admin/components/User/UserExport.vue"; + +vi.mock("file-saver", () => ({ + saveAs: vi.fn(), +})); + +describe("UserExport", () => { + let wrapper: VueWrapper<InstanceType<typeof UserExport>>; + let usersStore: ReturnType<typeof useUsersStore>; + + const mountWrapper = () => { + wrapper = mountComponent(UserExport, { + attachTo: document.body, + }); + + usersStore = useUsersStore(); + }; + + const getDialog = () => new DOMWrapper(document.body).find('[role="dialog"]'); + + afterEach(() => { + vi.clearAllMocks(); + wrapper?.unmount(); + document.body.innerHTML = ""; + }); + + describe("rendering", () => { + beforeEach(() => mountWrapper()); + + it("renders the export button", () => { + const exportBtn = 
wrapper.find('[data-test="users-export-btn"]'); + expect(exportBtn.exists()).toBe(true); + expect(exportBtn.text()).toContain("Export CSV"); + }); + + it("does not show dialog initially", () => { + expect(getDialog().exists()).toBe(false); + }); + }); + + describe("opening dialog", () => { + beforeEach(() => mountWrapper()); + + it("shows dialog when clicking export button", async () => { + const exportBtn = wrapper.find('[data-test="users-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + expect(dialog.exists()).toBe(true); + expect(dialog.text()).toContain("Export users data"); + }); + + it("displays all filter options", async () => { + const exportBtn = wrapper.find('[data-test="users-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + expect(dialog.text()).toContain("Users with more than:"); + expect(dialog.text()).toContain("Users with exactly:"); + }); + + it("has 'more than' filter selected by default", async () => { + const exportBtn = wrapper.find('[data-test="users-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const moreThanRadio = dialog.find('[data-test="radio-more-than"] input'); + expect((moreThanRadio.element as HTMLInputElement).checked).toBe(true); + }); + + it("shows number of namespaces input with default value 0", async () => { + const exportBtn = wrapper.find('[data-test="users-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const numberInput = dialog.find('[data-test="number-of-namespaces-input"] input'); + expect((numberInput.element as HTMLInputElement).value).toBe("0"); + }); + }); + + describe("filter selection", () => { + beforeEach(() => mountWrapper()); + + it("allows selecting 'exactly' filter", async () => { + const exportBtn = wrapper.find('[data-test="users-export-btn"]'); + await 
exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const exactlyRadio = dialog.find('[data-test="radio-exactly"] input'); + await exactlyRadio.setValue(true); + await flushPromises(); + + expect((exactlyRadio.element as HTMLInputElement).checked).toBe(true); + }); + + it("allows switching back to 'more than' filter", async () => { + const exportBtn = wrapper.find('[data-test="users-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + + // Select exactly filter first + const exactlyRadio = dialog.find('[data-test="radio-exactly"] input'); + await exactlyRadio.setValue(true); + await flushPromises(); + + // Then select "more than" again + const moreThanRadio = dialog.find('[data-test="radio-more-than"] input'); + await moreThanRadio.setValue(true); + await flushPromises(); + + expect((moreThanRadio.element as HTMLInputElement).checked).toBe(true); + }); + }); + + describe("form validation", () => { + beforeEach(() => mountWrapper()); + + it("shows error for negative number", async () => { + const exportBtn = wrapper.find('[data-test="users-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const numberInput = dialog.find('[data-test="number-of-namespaces-input"] input'); + await numberInput.setValue("-1"); + await flushPromises(); + + expect(dialog.text()).toContain("this must be greater than or equal to 0"); + }); + + it("accepts zero as valid input", async () => { + const exportBtn = wrapper.find('[data-test="users-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const numberInput = dialog.find('[data-test="number-of-namespaces-input"] input'); + await numberInput.setValue("0"); + await flushPromises(); + + expect(dialog.text()).not.toContain("must be greater than or equal to 0"); + }); + + it("accepts positive numbers", async () => { + const exportBtn 
= wrapper.find('[data-test="users-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const numberInput = dialog.find('[data-test="number-of-namespaces-input"] input'); + await numberInput.setValue("10"); + await flushPromises(); + + expect(dialog.text()).not.toContain("must be greater than or equal to 0"); + }); + + it("disables export button when validation fails", async () => { + const exportBtn = wrapper.find('[data-test="users-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const numberInput = dialog.find('[data-test="number-of-namespaces-input"] input'); + await numberInput.setValue("-1"); + await flushPromises(); + + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + expect(confirmBtn.attributes("disabled")).toBeDefined(); + }); + }); + + describe("exporting users", () => { + it("exports with 'more than' filter", async () => { + mountWrapper(); + const exportBtn = wrapper.find('[data-test="users-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const numberInput = dialog.find('[data-test="number-of-namespaces-input"] input'); + await numberInput.setValue("5"); + await flushPromises(); + + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + expect(usersStore.exportUsersToCsv).toHaveBeenCalledWith(expect.any(String)); + expect(saveAs).toHaveBeenCalledWith( + expect.any(Blob), + "users_with_more_than_5_namespaces.csv", + ); + expect(mockSnackbar.showSuccess).toHaveBeenCalledWith("Exported users successfully."); + }); + + it("exports with 'exactly' filter", async () => { + mountWrapper(); + const exportBtn = wrapper.find('[data-test="users-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const exactlyRadio = 
dialog.find('[data-test="radio-exactly"] input'); + await exactlyRadio.setValue(true); + await flushPromises(); + + const numberInput = dialog.find('[data-test="number-of-namespaces-input"] input'); + await numberInput.setValue("3"); + await flushPromises(); + + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + expect(usersStore.exportUsersToCsv).toHaveBeenCalled(); + expect(saveAs).toHaveBeenCalledWith( + expect.any(Blob), + "users_with_exactly_3_namespaces.csv", + ); + expect(mockSnackbar.showSuccess).toHaveBeenCalledWith("Exported users successfully."); + }); + + it("exports with zero namespaces", async () => { + mountWrapper(); + const exportBtn = wrapper.find('[data-test="users-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + expect(usersStore.exportUsersToCsv).toHaveBeenCalled(); + expect(saveAs).toHaveBeenCalledWith( + expect.any(Blob), + "users_with_more_than_0_namespaces.csv", + ); + expect(mockSnackbar.showSuccess).toHaveBeenCalledWith("Exported users successfully."); + }); + + it("closes dialog after successful export", async () => { + mountWrapper(); + const exportBtn = wrapper.find('[data-test="users-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + const dialogContent = getDialog().find(".v-overlay__content"); + expect(dialogContent.attributes("style")).toContain("display: none;"); + }); + }); + + describe("error handling", () => { + it("shows error snackbar when export fails", async () => { + mountWrapper(); + vi.mocked(usersStore.exportUsersToCsv).mockRejectedValueOnce( + createAxiosError(500, "Internal 
Server Error"), + ); + + const exportBtn = wrapper.find('[data-test="users-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + expect(mockSnackbar.showError).toHaveBeenCalledWith("Failed to export users."); + expect(saveAs).not.toHaveBeenCalled(); + }); + + it("keeps dialog open when export fails", async () => { + mountWrapper(); + vi.mocked(usersStore.exportUsersToCsv).mockRejectedValueOnce( + createAxiosError(500, "Internal Server Error"), + ); + + const exportBtn = wrapper.find('[data-test="users-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + expect(getDialog().exists()).toBe(true); + }); + }); + + describe("closing dialog", () => { + beforeEach(() => mountWrapper()); + + it("closes dialog when cancel button is clicked", async () => { + const exportBtn = wrapper.find('[data-test="users-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const cancelBtn = dialog.find('[data-test="cancel-btn"]'); + await cancelBtn.trigger("click"); + await flushPromises(); + + const dialogContent = getDialog().find(".v-overlay__content"); + expect(dialogContent.attributes("style")).toContain("display: none;"); + }); + + it("resets form when dialog is closed and reopened", async () => { + const exportBtn = wrapper.find('[data-test="users-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + let dialog = getDialog(); + const numberInput = dialog.find('[data-test="number-of-namespaces-input"] input'); + await numberInput.setValue("42"); + await flushPromises(); + + const cancelBtn = dialog.find('[data-test="cancel-btn"]'); + 
await cancelBtn.trigger("click"); + await flushPromises(); + + // Reopen dialog + await exportBtn.trigger("click"); + await flushPromises(); + + dialog = getDialog(); + const numberInputReopened = dialog.find('[data-test="number-of-namespaces-input"] input'); + expect((numberInputReopened.element as HTMLInputElement).value).toBe("0"); + }); + + it("resets filter selection when dialog is closed and reopened", async () => { + const exportBtn = wrapper.find('[data-test="users-export-btn"]'); + await exportBtn.trigger("click"); + await flushPromises(); + + let dialog = getDialog(); + const exactlyRadio = dialog.find('[data-test="radio-exactly"] input'); + await exactlyRadio.setValue(true); + await flushPromises(); + + const cancelBtn = dialog.find('[data-test="cancel-btn"]'); + await cancelBtn.trigger("click"); + await flushPromises(); + + // Reopen dialog + await exportBtn.trigger("click"); + await flushPromises(); + + dialog = getDialog(); + const moreThanRadio = dialog.find('[data-test="radio-more-than"] input'); + expect((moreThanRadio.element as HTMLInputElement).checked).toBe(true); + }); + }); +}); diff --git a/ui/admin/tests/unit/components/User/UserFormDialog.spec.ts b/ui/admin/tests/unit/components/User/UserFormDialog.spec.ts new file mode 100644 index 00000000000..5de05010f34 --- /dev/null +++ b/ui/admin/tests/unit/components/User/UserFormDialog.spec.ts @@ -0,0 +1,578 @@ +import { describe, expect, it, beforeEach, vi, afterEach } from "vitest"; +import { DOMWrapper, VueWrapper, flushPromises } from "@vue/test-utils"; +import { mountComponent, mockSnackbar } from "@tests/utils/mount"; +import { createAxiosError } from "@tests/utils/axiosError"; +import useUsersStore from "@admin/store/modules/users"; +import UserFormDialog from "@admin/components/User/UserFormDialog.vue"; +import { mockUser, mockInvitedUser, mockNotConfirmedUser } from "../../mocks"; + +describe("UserFormDialog", () => { + let wrapper: VueWrapper>; + let usersStore: ReturnType; + + const 
getDialog = () => new DOMWrapper(document.body).find('[role="dialog"]'); + + const openDialog = async () => { + const buttonSelector = wrapper.props("createUser") ? '[data-test="user-add-btn"]' : '[data-test="user-edit-btn"]'; + const openBtn = wrapper.find(buttonSelector); + await openBtn.trigger("click"); + await flushPromises(); + }; + + afterEach(() => { + vi.clearAllMocks(); + wrapper?.unmount(); + document.body.innerHTML = ""; + }); + + describe("create user mode", () => { + const mountWrapper = () => { + wrapper = mountComponent(UserFormDialog, { + props: { createUser: true }, + attachTo: document.body, + }); + + usersStore = useUsersStore(); + }; + + describe("rendering", () => { + beforeEach(() => mountWrapper()); + + it("renders the add user button", () => { + const addBtn = wrapper.find('[data-test="user-add-btn"]'); + expect(addBtn.exists()).toBe(true); + expect(addBtn.text()).toContain("Add User"); + }); + + it("does not show dialog initially", () => { + expect(getDialog().exists()).toBe(false); + }); + }); + + describe("opening dialog", () => { + beforeEach(async () => { + mountWrapper(); + await openDialog(); + }); + + it("shows dialog when clicking add button", () => { + const dialog = getDialog(); + expect(dialog.exists()).toBe(true); + expect(dialog.text()).toContain("Add new user"); + }); + + it("shows all form fields", () => { + const dialog = getDialog(); + expect(dialog.find('[data-test="name-field"]').exists()).toBe(true); + expect(dialog.find('[data-test="username-field"]').exists()).toBe(true); + expect(dialog.find('[data-test="email-field"]').exists()).toBe(true); + expect(dialog.find('[data-test="password-field"]').exists()).toBe(true); + expect(dialog.find('[data-test="is-admin-checkbox"]').exists()).toBe(true); + }); + + it("does not show user confirmed checkbox in create mode", () => { + const dialog = getDialog(); + expect(dialog.find('[data-test="is-confirmed-checkbox"]').exists()).toBe(false); + }); + + it("shows create button", () 
=> { + const dialog = getDialog(); + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + expect(confirmBtn.exists()).toBe(true); + expect(confirmBtn.text()).toContain("Create"); + }); + }); + + describe("form validation", () => { + beforeEach(async () => { + mountWrapper(); + await openDialog(); + }); + + it("shows error when name is empty", async () => { + const dialog = getDialog(); + const nameInput = dialog.find('[data-test="name-field"] input'); + await nameInput.setValue(""); + await nameInput.trigger("blur"); + await flushPromises(); + + expect(dialog.text()).toContain("this is a required field"); + }); + + it("shows error for invalid email", async () => { + const dialog = getDialog(); + const emailInput = dialog.find('[data-test="email-field"] input'); + await emailInput.setValue("invalid-email"); + await emailInput.trigger("blur"); + await flushPromises(); + + expect(dialog.text()).toContain("this must be a valid email"); + }); + + it("accepts valid email", async () => { + const dialog = getDialog(); + const emailInput = dialog.find('[data-test="email-field"] input'); + await emailInput.setValue("valid@example.com"); + await emailInput.trigger("blur"); + await flushPromises(); + + expect(dialog.text()).not.toContain("this must be a valid email"); + }); + + it("shows error when username is empty", async () => { + const dialog = getDialog(); + const usernameInput = dialog.find('[data-test="username-field"] input'); + await usernameInput.setValue(""); + await usernameInput.trigger("blur"); + await flushPromises(); + + expect(dialog.text()).toContain("this is a required field"); + }); + + it("shows error when password is empty in create mode", async () => { + const dialog = getDialog(); + const passwordInput = dialog.find('[data-test="password-field"] input'); + await passwordInput.setValue(""); + await passwordInput.trigger("blur"); + await flushPromises(); + + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + 
expect(confirmBtn.attributes("disabled")).toBeDefined(); + }); + + it("shows error when password is less than 6 characters", async () => { + const dialog = getDialog(); + const passwordInput = dialog.find('[data-test="password-field"] input'); + await passwordInput.setValue("12345"); + await passwordInput.trigger("blur"); + await flushPromises(); + + expect(dialog.text()).toContain("Password must be at least 6 characters"); + }); + + it("accepts password with 6 or more characters", async () => { + const dialog = getDialog(); + const passwordInput = dialog.find('[data-test="password-field"] input'); + await passwordInput.setValue("123456"); + await passwordInput.trigger("blur"); + await flushPromises(); + + expect(dialog.text()).not.toContain("Password must be at least 6 characters"); + }); + + it("shows error when max namespaces field is empty", async () => { + const dialog = getDialog(); + const changeLimitCheckbox = dialog.find('[data-test="change-namespace-limit-checkbox"] input'); + await changeLimitCheckbox.setValue(true); + await flushPromises(); + + const numberInput = dialog.find('[data-test="max-namespaces-input"] input'); + await numberInput.setValue(""); + await numberInput.trigger("blur"); + await flushPromises(); + + expect(dialog.text()).toContain("This field is required"); + }); + }); + + describe("password visibility", () => { + beforeEach(async () => { + mountWrapper(); + await openDialog(); + }); + + it("hides password by default", () => { + const dialog = getDialog(); + const passwordInput = dialog.find('[data-test="password-field"] input'); + expect(passwordInput.attributes("type")).toBe("password"); + }); + + it("toggles password visibility when clicking the eye icon", async () => { + const dialog = getDialog(); + let passwordInput = dialog.find('[data-test="password-field"] input'); + expect(passwordInput.attributes("type")).toBe("password"); + + const eyeIcon = dialog.find('[data-test="password-field"] .mdi-eye-off'); + await 
eyeIcon.trigger("click"); + await flushPromises(); + + passwordInput = dialog.find('[data-test="password-field"] input'); + expect(passwordInput.attributes("type")).toBe("text"); + }); + }); + + describe("namespace limit options", () => { + beforeEach(async () => { + mountWrapper(); + await openDialog(); + }); + + it("shows namespace limit field when checkbox is checked", async () => { + const dialog = getDialog(); + const checkbox = dialog.find('[data-test="change-namespace-limit-checkbox"] input'); + await checkbox.setValue(true); + await flushPromises(); + + expect(dialog.find('[data-test="max-namespaces-input"]').exists()).toBe(true); + }); + + it("shows disable namespace creation checkbox when change limit is checked", async () => { + const dialog = getDialog(); + const checkbox = dialog.find('[data-test="change-namespace-limit-checkbox"] input'); + await checkbox.setValue(true); + await flushPromises(); + + expect(dialog.find('[data-test="disable-namespace-creation-checkbox"]').exists()).toBe(true); + }); + + it("disables number input when disable namespace creation is checked", async () => { + const dialog = getDialog(); + const changeLimitCheckbox = dialog.find('[data-test="change-namespace-limit-checkbox"] input'); + await changeLimitCheckbox.setValue(true); + await flushPromises(); + + const disableCheckbox = dialog.find('[data-test="disable-namespace-creation-checkbox"] input'); + await disableCheckbox.setValue(true); + await flushPromises(); + + const numberInput = dialog.find('[data-test="max-namespaces-input"] input'); + expect(numberInput.attributes("disabled")).toBeDefined(); + }); + }); + + describe("creating user", () => { + beforeEach(async () => { + mountWrapper(); + await openDialog(); + }); + + it("calls store action and shows success message on submit", async () => { + const dialog = getDialog(); + await dialog.find('[data-test="name-field"] input').setValue("New User"); + await dialog.find('[data-test="username-field"] 
input').setValue("newuser"); + await dialog.find('[data-test="email-field"] input').setValue("newuser@example.com"); + await dialog.find('[data-test="password-field"] input').setValue("password123"); + await flushPromises(); + + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + expect(usersStore.addUser).toHaveBeenCalledWith( + expect.objectContaining({ + name: "New User", + username: "newuser", + email: "newuser@example.com", + password: "password123", + }), + ); + expect(mockSnackbar.showSuccess).toHaveBeenCalledWith("User added successfully."); + }); + + it("fetches user list after successful creation", async () => { + const dialog = getDialog(); + await dialog.find('[data-test="name-field"] input').setValue("New User"); + await dialog.find('[data-test="username-field"] input').setValue("newuser"); + await dialog.find('[data-test="email-field"] input').setValue("newuser@example.com"); + await dialog.find('[data-test="password-field"] input').setValue("password123"); + await flushPromises(); + + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + expect(usersStore.fetchUsersList).toHaveBeenCalled(); + }); + + it("includes admin flag when admin checkbox is checked", async () => { + const dialog = getDialog(); + await dialog.find('[data-test="name-field"] input').setValue("Admin User"); + await dialog.find('[data-test="username-field"] input').setValue("adminuser"); + await dialog.find('[data-test="email-field"] input').setValue("admin@example.com"); + await dialog.find('[data-test="password-field"] input').setValue("password123"); + await dialog.find('[data-test="is-admin-checkbox"] input').setValue(true); + await flushPromises(); + + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + expect(usersStore.addUser).toHaveBeenCalledWith( + 
expect.objectContaining({ + admin: true, + }), + ); + }); + + it("includes namespace limit when specified", async () => { + const dialog = getDialog(); + await dialog.find('[data-test="name-field"] input').setValue("Limited User"); + await dialog.find('[data-test="username-field"] input').setValue("limited"); + await dialog.find('[data-test="email-field"] input').setValue("limited@example.com"); + await dialog.find('[data-test="password-field"] input').setValue("password123"); + + const changeLimitCheckbox = dialog.find('[data-test="change-namespace-limit-checkbox"] input'); + await changeLimitCheckbox.setValue(true); + await flushPromises(); + + const numberInput = dialog.find('[data-test="max-namespaces-input"] input'); + await numberInput.setValue("5"); + await flushPromises(); + + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + expect(usersStore.addUser).toHaveBeenCalledWith( + expect.objectContaining({ + max_namespaces: 5, + }), + ); + }); + }); + + describe("error handling", () => { + beforeEach(async () => { + mountWrapper(); + await openDialog(); + }); + + it("shows error message when creation fails", async () => { + vi.mocked(usersStore.addUser).mockRejectedValueOnce( + createAxiosError(400, "Bad Request", ["name"]), + ); + + const dialog = getDialog(); + await dialog.find('[data-test="name-field"] input').setValue("New User"); + await dialog.find('[data-test="username-field"] input').setValue("newuser"); + await dialog.find('[data-test="email-field"] input').setValue("newuser@example.com"); + await dialog.find('[data-test="password-field"] input').setValue("password123"); + await flushPromises(); + + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + expect(mockSnackbar.showError).toHaveBeenCalledWith("Failed to submit the user data."); + }); + + it("shows field-specific error for duplicate username", 
async () => { + vi.mocked(usersStore.addUser).mockRejectedValueOnce(createAxiosError(400, "Bad Request", ["username"])); + + const dialog = getDialog(); + await dialog.find('[data-test="name-field"] input').setValue("New User"); + await dialog.find('[data-test="username-field"] input').setValue("duplicate"); + await dialog.find('[data-test="email-field"] input').setValue("newuser@example.com"); + await dialog.find('[data-test="password-field"] input').setValue("password123"); + await flushPromises(); + + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + expect(dialog.text()).toContain("This username is invalid!"); + }); + }); + + describe("closing dialog", () => { + it("closes dialog and resets form when cancel is clicked", async () => { + mountWrapper(); + await openDialog(); + + const dialog = getDialog(); + await dialog.find('[data-test="name-field"] input').setValue("Test User"); + await flushPromises(); + + const cancelBtn = dialog.find('[data-test="cancel-btn"]'); + await cancelBtn.trigger("click"); + await flushPromises(); + + const dialogContent = getDialog().find(".v-overlay__content"); + expect(dialogContent.attributes("style")).toContain("display: none;"); + }); + }); + }); + + describe("edit user mode", () => { + const mountWrapper = (user = mockUser) => { + wrapper = mountComponent(UserFormDialog, { + props: { + createUser: false, + user, + }, + piniaOptions: { initialState: { adminAuth: { currentUser: "testuser" } } }, + attachTo: document.body, + }); + + usersStore = useUsersStore(); + }; + + describe("rendering", () => { + beforeEach(() => mountWrapper()); + + it("renders the edit button", () => { + const editBtn = wrapper.find('[data-test="user-edit-btn"]'); + expect(editBtn.exists()).toBe(true); + }); + + it("shows dialog with Edit title when clicking edit button", async () => { + await openDialog(); + + const dialog = getDialog(); + expect(dialog.exists()).toBe(true); + 
expect(dialog.text()).toContain("Edit user"); + }); + + it("displays current user values in form", async () => { + await openDialog(); + + const dialog = getDialog(); + expect((dialog.find('[data-test="name-field"] input').element as HTMLInputElement).value).toBe(mockUser.name); + expect((dialog.find('[data-test="username-field"] input').element as HTMLInputElement).value).toBe(mockUser.username); + expect((dialog.find('[data-test="email-field"] input').element as HTMLInputElement).value).toBe(mockUser.email); + }); + + it("shows update button in edit mode", async () => { + await openDialog(); + + const dialog = getDialog(); + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + expect(confirmBtn.text()).toContain("Update"); + }); + + it("shows user confirmed checkbox in edit mode", async () => { + wrapper.unmount(); + mountWrapper(mockNotConfirmedUser); + + await openDialog(); + + const dialog = getDialog(); + expect(dialog.find('[data-test="is-confirmed-checkbox"]').exists()).toBe(true); + }); + }); + + describe("user confirmation status", () => { + it("enables confirmed checkbox for not-confirmed users", async () => { + mountWrapper(mockNotConfirmedUser); + + await openDialog(); + + const dialog = getDialog(); + const confirmedCheckbox = dialog.find('[data-test="is-confirmed-checkbox"] input'); + expect(confirmedCheckbox.attributes("disabled")).toBeUndefined(); + }); + + it("disables confirmed checkbox for already confirmed users", async () => { + mountWrapper(mockUser); + + await openDialog(); + await flushPromises(); + + const dialog = getDialog(); + const confirmedCheckbox = dialog.find('[data-test="is-confirmed-checkbox"] input'); + expect(confirmedCheckbox.attributes("disabled")).toBeDefined(); + }); + + it("disables confirmed checkbox for invited users", async () => { + mountWrapper(mockInvitedUser); + + await openDialog(); + + const dialog = getDialog(); + const confirmedCheckbox = dialog.find('[data-test="is-confirmed-checkbox"] input'); + 
expect(confirmedCheckbox.attributes("disabled")).toBeDefined(); + }); + }); + + describe("admin privileges", () => { + it("allows changing admin status for other users", async () => { + const otherUser = { ...mockUser, username: "otheruser", admin: false }; + mountWrapper(otherUser); + + await openDialog(); + + const dialog = getDialog(); + const adminCheckbox = dialog.find('[data-test="is-admin-checkbox"] input'); + expect(adminCheckbox.attributes("disabled")).toBeUndefined(); + }); + + it("prevents current user from removing their own admin privileges", async () => { + mountWrapper(mockUser); + + await openDialog(); + + const dialog = getDialog(); + const adminCheckbox = dialog.find('[data-test="is-admin-checkbox"] input'); + expect(adminCheckbox.attributes("disabled")).toBeDefined(); + }); + + describe("namespace limit options", () => { + it("starts with namespace creation disabled if user has max_namespaces set to 0", async () => { + mountWrapper({ ...mockUser, max_namespaces: 0 }); + await openDialog(); + + const dialog = getDialog(); + const disableCheckbox = dialog.find('[data-test="disable-namespace-creation-checkbox"] input'); + expect((disableCheckbox.element as HTMLInputElement).checked).toBe(true); + }); + }); + }); + + describe("updating user", () => { + beforeEach(async () => { + mountWrapper(); + await openDialog(); + }); + + it("allows updating user without changing password", async () => { + const dialog = getDialog(); + await dialog.find('[data-test="name-field"] input').setValue("Updated Name"); + await flushPromises(); + + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + expect(confirmBtn.attributes("disabled")).toBeUndefined(); + }); + + it("validates password minimum length when password is changed", async () => { + const dialog = getDialog(); + const passwordInput = dialog.find('[data-test="password-field"] input'); + await passwordInput.setValue("12345"); + await passwordInput.trigger("blur"); + await flushPromises(); + + 
expect(dialog.text()).toContain("Password must be at least 6 characters"); + }); + + it("calls store action with updated data", async () => { + const dialog = getDialog(); + await dialog.find('[data-test="name-field"] input').setValue("Updated Name"); + await flushPromises(); + + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + expect(usersStore.updateUser).toHaveBeenCalledWith( + expect.objectContaining({ + name: "Updated Name", + id: mockUser.id, + }), + ); + expect(mockSnackbar.showSuccess).toHaveBeenCalledWith("User updated successfully."); + }); + + it("shows error when update fails", async () => { + vi.mocked(usersStore.updateUser).mockRejectedValueOnce( + createAxiosError(500, "Internal Server Error", []), + ); + + const dialog = getDialog(); + const confirmBtn = dialog.find('[data-test="confirm-btn"]'); + await confirmBtn.trigger("click"); + await flushPromises(); + + expect(mockSnackbar.showError).toHaveBeenCalledWith("Failed to submit the user data."); + }); + }); + }); +}); diff --git a/ui/admin/tests/unit/components/User/UserList.spec.ts b/ui/admin/tests/unit/components/User/UserList.spec.ts new file mode 100644 index 00000000000..8f2e5665ce2 --- /dev/null +++ b/ui/admin/tests/unit/components/User/UserList.spec.ts @@ -0,0 +1,280 @@ +import { describe, expect, it, beforeEach, vi, afterEach } from "vitest"; +import { Router } from "vue-router"; +import { VueWrapper, flushPromises } from "@vue/test-utils"; +import { mountComponent, mockSnackbar } from "@tests/utils/mount"; +import { createCleanAdminRouter } from "@tests/utils/router"; +import { createAxiosError } from "@tests/utils/axiosError"; +import useUsersStore from "@admin/store/modules/users"; +import useAuthStore from "@admin/store/modules/auth"; +import UserList from "@admin/components/User/UserList.vue"; +import { mockUsers } from "../../mocks"; +import { IAdminUser } from "@admin/interfaces/IUser"; + +const 
mockSAMLUser: IAdminUser = { + ...mockUsers[0], + id: "user-saml", + username: "samluser", + preferences: { + auth_methods: ["saml"], + }, +}; + +const mockLocalUser: IAdminUser = { + ...mockUsers[0], + id: "user-local", + username: "localuser", + preferences: { + auth_methods: ["local"], + }, +}; + +const testUsers = [mockSAMLUser, mockLocalUser]; + +describe("UserList", () => { + let wrapper: VueWrapper>; + let router: Router; + let usersStore: ReturnType; + let authStore: ReturnType; + + const mountWrapper = (mockUserCount?: number) => { + router = createCleanAdminRouter(); + + wrapper = mountComponent(UserList, { + global: { plugins: [router] }, + piniaOptions: { + initialState: { + adminUsers: { + users: testUsers, + usersCount: mockUserCount ?? testUsers.length, + }, + }, + }, + }); + + usersStore = useUsersStore(); + authStore = useAuthStore(); + }; + + afterEach(() => { + vi.clearAllMocks(); + wrapper?.unmount(); + }); + + describe("rendering", () => { + beforeEach(() => mountWrapper()); + + it("renders the data table", () => { + expect(wrapper.find('[data-test="users-list"]').exists()).toBe(true); + }); + + it("displays user names", () => { + const nameCells = wrapper.findAll('[data-test="name-cell"]'); + expect(nameCells[0].text()).toBe(testUsers[0].name); + expect(nameCells[1].text()).toBe(testUsers[1].name); + }); + + it("displays user emails", () => { + const emailCells = wrapper.findAll('[data-test="email-cell"]'); + expect(emailCells[0].text()).toBe(testUsers[0].email); + expect(emailCells[1].text()).toBe(testUsers[1].email); + }); + + it("displays user usernames", () => { + const usernameCells = wrapper.findAll('[data-test="username-cell"]'); + expect(usernameCells[0].text()).toBe(testUsers[0].username); + expect(usernameCells[1].text()).toBe(testUsers[1].username); + }); + + it("displays user status chips", () => { + const statusChips = wrapper.findAllComponents({ name: "UserStatusChip" }); + expect(statusChips).toHaveLength(testUsers.length); + 
}); + + it("displays info buttons for each user", () => { + const infoButtons = wrapper.findAll('[data-test="info-button"]'); + expect(infoButtons).toHaveLength(testUsers.length); + }); + + it("displays edit buttons for each user", () => { + const editButtons = wrapper.findAll('[data-test="user-edit-btn"]'); + expect(editButtons).toHaveLength(testUsers.length); + }); + + it("displays login buttons for each user", () => { + const loginButtons = wrapper.findAll('[data-test="login-button"]'); + expect(loginButtons).toHaveLength(testUsers.length); + }); + + it("displays delete buttons for each user", () => { + const deleteComponents = wrapper.findAllComponents({ name: "UserDelete" }); + expect(deleteComponents).toHaveLength(testUsers.length); + }); + + it("displays reset password button only for SAML users", () => { + const resetPasswordComponents = wrapper.findAllComponents({ name: "UserResetPassword" }); + // Only SAML user should have reset password button + expect(resetPasswordComponents).toHaveLength(1); + }); + }); + + describe("fetching users", () => { + it("fetches users on mount", () => { + mountWrapper(); + + expect(usersStore.fetchUsersList).toHaveBeenCalledWith( + expect.objectContaining({ + perPage: 10, + page: 1, + }), + ); + }); + + it("refetches users when page changes", async () => { + mountWrapper(11); // Mock total count to 11 to enable pagination + + // Click next page button + const nextPageBtn = wrapper.find('[data-test="pager-next"]'); + await nextPageBtn.trigger("click"); + await flushPromises(); + + expect(usersStore.fetchUsersList).toHaveBeenCalledWith( + expect.objectContaining({ + page: 2, + }), + ); + }); + + it("refetches users when items per page changes", async () => { + mountWrapper(20); + + // Change items per page via combobox + const ippCombo = wrapper.find('[data-test="ipp-combo"] input'); + await ippCombo.setValue(20); + await flushPromises(); + + expect(usersStore.fetchUsersList).toHaveBeenCalledWith( + expect.objectContaining({ + 
perPage: 20, + }), + ); + }); + }); + + describe("navigating to user details", () => { + beforeEach(() => mountWrapper()); + + it("navigates when clicking info button", async () => { + const pushSpy = vi.spyOn(router, "push"); + const infoButton = wrapper.findAll('[data-test="info-button"]')[0]; + + await infoButton.trigger("click"); + + expect(pushSpy).toHaveBeenCalledWith({ + name: "userDetails", + params: { id: testUsers[0].id }, + }); + }); + }); + + describe("login with token", () => { + const windowOpenSpy = vi.spyOn(window, "open").mockImplementation(() => null); + + beforeEach(() => { + mountWrapper(); + vi.mocked(authStore.getLoginToken).mockResolvedValue("mock-token-123"); + }); + + it("gets login token and opens new window when clicking login button", async () => { + const loginButton = wrapper.findAll('[data-test="login-button"]')[0]; + await loginButton.trigger("click"); + await flushPromises(); + + expect(authStore.getLoginToken).toHaveBeenCalledWith(testUsers[0].id); + expect(windowOpenSpy).toHaveBeenCalledWith( + "/login?token=mock-token-123", + "_target", + ); + }); + + it("shows error when getting login token fails", async () => { + vi.mocked(authStore.getLoginToken).mockRejectedValueOnce( + createAxiosError(500, "Internal Server Error"), + ); + + const loginButton = wrapper.findAll('[data-test="login-button"]')[0]; + await loginButton.trigger("click"); + await flushPromises(); + + expect(mockSnackbar.showError).toHaveBeenCalledWith("Failed to get the login token."); + expect(windowOpenSpy).not.toHaveBeenCalled(); + }); + }); + + describe("opening edit dialog", () => { + beforeEach(() => mountWrapper()); + + it("renders UserFormDialog components for each user", () => { + const formDialogs = wrapper.findAllComponents({ name: "UserFormDialog" }); + expect(formDialogs).toHaveLength(testUsers.length); + }); + + it("passes user data to UserFormDialog", () => { + const formDialogs = wrapper.findAllComponents({ name: "UserFormDialog" }); + 
expect(formDialogs[0].props("user")).toEqual(testUsers[0]); + expect(formDialogs[1].props("user")).toEqual(testUsers[1]); + }); + }); + + describe("user deletion", () => { + beforeEach(() => mountWrapper()); + + it("renders UserDelete component for each user", () => { + const deleteComponents = wrapper.findAllComponents({ name: "UserDelete" }); + expect(deleteComponents).toHaveLength(testUsers.length); + }); + + it("passes user id to UserDelete component", () => { + const deleteComponents = wrapper.findAllComponents({ name: "UserDelete" }); + expect(deleteComponents[0].props("id")).toBe(testUsers[0].id); + expect(deleteComponents[1].props("id")).toBe(testUsers[1].id); + }); + }); + + describe("reset password for SAML users", () => { + beforeEach(() => mountWrapper()); + + it("shows reset password button only for users with SAML-only authentication", () => { + const resetPasswordComponents = wrapper.findAllComponents({ name: "UserResetPassword" }); + + // Should only show for SAML user + expect(resetPasswordComponents).toHaveLength(1); + expect(resetPasswordComponents[0].props("userId")).toBe(testUsers[0].id); + }); + + it("refetches users after password reset", async () => { + const resetPasswordComponent = wrapper.findComponent({ name: "UserResetPassword" }); + const postResetFetchSpy = vi.spyOn(usersStore, "fetchUsersList"); + await resetPasswordComponent.vm.$emit("update"); + await flushPromises(); + + // Should have been called once on mount and once after update + expect(postResetFetchSpy).toHaveBeenCalled(); + }); + }); + + describe("error handling", () => { + it("shows error snackbar when fetching users fails", async () => { + mountWrapper(11); + vi.mocked(usersStore.fetchUsersList).mockRejectedValueOnce( + createAxiosError(500, "Internal Server Error"), + ); + + // Trigger refetch by changing page + const nextPageBtn = wrapper.find('[data-test="pager-next"]'); + await nextPageBtn.trigger("click"); + await flushPromises(); + + 
expect(mockSnackbar.showError).toHaveBeenCalledWith("Failed to fetch users."); + }); + }); +}); diff --git a/ui/admin/tests/unit/components/User/UserResetPassword.spec.ts b/ui/admin/tests/unit/components/User/UserResetPassword.spec.ts new file mode 100644 index 00000000000..2869ec24630 --- /dev/null +++ b/ui/admin/tests/unit/components/User/UserResetPassword.spec.ts @@ -0,0 +1,307 @@ +import { describe, expect, it, beforeEach, vi, afterEach } from "vitest"; +import { DOMWrapper, VueWrapper, flushPromises } from "@vue/test-utils"; +import { mountComponent, mockSnackbar } from "@tests/utils/mount"; +import { createAxiosError } from "@tests/utils/axiosError"; +import useUsersStore from "@admin/store/modules/users"; +import UserResetPassword from "@admin/components/User/UserResetPassword.vue"; + +describe("UserResetPassword", () => { + let wrapper: VueWrapper>; + let usersStore: ReturnType; + const mockUserId = "user-123"; + const mockGeneratedPassword = "generated-password-456"; + + const mountWrapper = () => { + wrapper = mountComponent(UserResetPassword, { + props: { userId: mockUserId }, + attachTo: document.body, + }); + + usersStore = useUsersStore(); + }; + + const getDialog = () => new DOMWrapper(document.body).find('[role="dialog"]'); + + afterEach(() => { + vi.clearAllMocks(); + wrapper?.unmount(); + document.body.innerHTML = ""; + }); + + describe("rendering", () => { + beforeEach(() => mountWrapper()); + + it("renders the trigger icon button", () => { + const triggerBtn = wrapper.find('[data-test="open-dialog-icon"]'); + expect(triggerBtn.exists()).toBe(true); + }); + + it("does not show dialog initially", () => { + expect(getDialog().exists()).toBe(false); + }); + }); + + describe("opening dialog", () => { + beforeEach(() => mountWrapper()); + + it("shows dialog when clicking the trigger button", async () => { + const triggerBtn = wrapper.find('[data-test="open-dialog-icon"]'); + await triggerBtn.trigger("click"); + await flushPromises(); + + const dialog 
= getDialog(); + expect(dialog.exists()).toBe(true); + expect(dialog.text()).toContain("Enable Local Authentication"); + }); + + it("displays step 1 content with confirmation message", async () => { + const triggerBtn = wrapper.find('[data-test="open-dialog-icon"]'); + await triggerBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + expect(dialog.text()).toContain("This action will enable local authentication"); + expect(dialog.text()).toContain("generate a new password"); + }); + + it("shows enable and cancel buttons in step 1", async () => { + const triggerBtn = wrapper.find('[data-test="open-dialog-icon"]'); + await triggerBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + expect(dialog.find('[data-test="enable-btn"]').exists()).toBe(true); + expect(dialog.find('[data-test="cancel-btn"]').exists()).toBe(true); + }); + }); + + describe("enabling local authentication", () => { + beforeEach(() => { + mountWrapper(); + vi.mocked(usersStore.resetUserPassword).mockResolvedValue(mockGeneratedPassword); + }); + + it("calls store action when clicking enable button", async () => { + const triggerBtn = wrapper.find('[data-test="open-dialog-icon"]'); + await triggerBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const enableBtn = dialog.find('[data-test="enable-btn"]'); + await enableBtn.trigger("click"); + await flushPromises(); + + expect(usersStore.resetUserPassword).toHaveBeenCalledWith(mockUserId); + }); + + it("proceeds to step 2 after successful password reset", async () => { + const triggerBtn = wrapper.find('[data-test="open-dialog-icon"]'); + await triggerBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const enableBtn = dialog.find('[data-test="enable-btn"]'); + await enableBtn.trigger("click"); + await flushPromises(); + + // Step 2 should show the warning alert + expect(dialog.find('[data-test="password-warning"]').exists()).toBe(true); + 
}); + + it("displays generated password in step 2", async () => { + const triggerBtn = wrapper.find('[data-test="open-dialog-icon"]'); + await triggerBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const enableBtn = dialog.find('[data-test="enable-btn"]'); + await enableBtn.trigger("click"); + await flushPromises(); + + const passwordField = dialog.find('[data-test="generated-password-field"] input'); + expect((passwordField.element as HTMLInputElement).value).toBe(mockGeneratedPassword); + }); + + it("shows password warning alert in step 2", async () => { + const triggerBtn = wrapper.find('[data-test="open-dialog-icon"]'); + await triggerBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const enableBtn = dialog.find('[data-test="enable-btn"]'); + await enableBtn.trigger("click"); + await flushPromises(); + + const warning = dialog.find('[data-test="password-warning"]'); + expect(warning.exists()).toBe(true); + expect(warning.text()).toContain("Users are strongly encouraged to change this password"); + }); + + it("shows close button in step 2", async () => { + const triggerBtn = wrapper.find('[data-test="open-dialog-icon"]'); + await triggerBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const enableBtn = dialog.find('[data-test="enable-btn"]'); + await enableBtn.trigger("click"); + await flushPromises(); + + expect(dialog.find('[data-test="close-btn"]').exists()).toBe(true); + expect(dialog.find('[data-test="enable-btn"]').exists()).toBe(false); + expect(dialog.find('[data-test="cancel-btn"]').exists()).toBe(false); + }); + + it("password field is readonly", async () => { + const triggerBtn = wrapper.find('[data-test="open-dialog-icon"]'); + await triggerBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const enableBtn = dialog.find('[data-test="enable-btn"]'); + await enableBtn.trigger("click"); + await flushPromises(); + + const 
passwordField = dialog.find('[data-test="generated-password-field"] input'); + expect(passwordField.attributes("readonly")).toBeDefined(); + }); + }); + + describe("error handling", () => { + beforeEach(() => mountWrapper()); + + it("shows error message when password reset fails", async () => { + vi.mocked(usersStore.resetUserPassword).mockRejectedValueOnce( + createAxiosError(500, "Internal Server Error"), + ); + + const triggerBtn = wrapper.find('[data-test="open-dialog-icon"]'); + await triggerBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const enableBtn = dialog.find('[data-test="enable-btn"]'); + await enableBtn.trigger("click"); + await flushPromises(); + + expect(mockSnackbar.showError).toHaveBeenCalledWith("Failed to reset user password. Please try again."); + }); + + it("stays on step 1 when password reset fails", async () => { + vi.mocked(usersStore.resetUserPassword).mockRejectedValueOnce( + createAxiosError(500, "Internal Server Error"), + ); + + const triggerBtn = wrapper.find('[data-test="open-dialog-icon"]'); + await triggerBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const enableBtn = dialog.find('[data-test="enable-btn"]'); + await enableBtn.trigger("click"); + await flushPromises(); + + // Should still be on step 1 with enable button visible + expect(dialog.find('[data-test="enable-btn"]').exists()).toBe(true); + expect(dialog.find('[data-test="password-warning"]').exists()).toBe(false); + }); + }); + + describe("closing dialog", () => { + beforeEach(() => { + mountWrapper(); + vi.mocked(usersStore.resetUserPassword).mockResolvedValue(mockGeneratedPassword); + }); + + it("closes dialog and resets to step 1 when clicking cancel in step 1", async () => { + const triggerBtn = wrapper.find('[data-test="open-dialog-icon"]'); + await triggerBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const cancelBtn = dialog.find('[data-test="cancel-btn"]'); + 
await cancelBtn.trigger("click"); + await flushPromises(); + + const dialogContent = getDialog().find(".v-overlay__content"); + expect(dialogContent.attributes("style")).toContain("display: none;"); + }); + + it("emits update event when closing from step 1", async () => { + const triggerBtn = wrapper.find('[data-test="open-dialog-icon"]'); + await triggerBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const cancelBtn = dialog.find('[data-test="cancel-btn"]'); + await cancelBtn.trigger("click"); + await flushPromises(); + + expect(wrapper.emitted("update")).toBeTruthy(); + }); + + it("closes dialog and resets to step 1 when clicking close in step 2", async () => { + const triggerBtn = wrapper.find('[data-test="open-dialog-icon"]'); + await triggerBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const enableBtn = dialog.find('[data-test="enable-btn"]'); + await enableBtn.trigger("click"); + await flushPromises(); + + // Now in step 2 + const closeBtn = dialog.find('[data-test="close-btn"]'); + await closeBtn.trigger("click"); + await flushPromises(); + + const dialogContent = getDialog().find(".v-overlay__content"); + expect(dialogContent.attributes("style")).toContain("display: none;"); + }); + + it("emits update event when closing from step 2", async () => { + const triggerBtn = wrapper.find('[data-test="open-dialog-icon"]'); + await triggerBtn.trigger("click"); + await flushPromises(); + + const dialog = getDialog(); + const enableBtn = dialog.find('[data-test="enable-btn"]'); + await enableBtn.trigger("click"); + await flushPromises(); + + // Now in step 2 + const closeBtn = dialog.find('[data-test="close-btn"]'); + await closeBtn.trigger("click"); + await flushPromises(); + + expect(wrapper.emitted("update")).toBeTruthy(); + }); + + it("resets to step 1 when reopening dialog after closing from step 2", async () => { + // Open and go to step 2 + let triggerBtn = 
wrapper.find('[data-test="open-dialog-icon"]'); + await triggerBtn.trigger("click"); + await flushPromises(); + + let dialog = getDialog(); + const enableBtn = dialog.find('[data-test="enable-btn"]'); + await enableBtn.trigger("click"); + await flushPromises(); + + // Close from step 2 + const closeBtn = dialog.find('[data-test="close-btn"]'); + await closeBtn.trigger("click"); + await flushPromises(); + + // Reopen + triggerBtn = wrapper.find('[data-test="open-dialog-icon"]'); + await triggerBtn.trigger("click"); + await flushPromises(); + + dialog = getDialog(); + + // Should be back to step 1 with enable button + expect(dialog.find('[data-test="enable-btn"]').exists()).toBe(true); + }); + }); +}); diff --git a/ui/admin/tests/unit/components/User/UserStatusChip.spec.ts b/ui/admin/tests/unit/components/User/UserStatusChip.spec.ts new file mode 100644 index 00000000000..e2c965b3595 --- /dev/null +++ b/ui/admin/tests/unit/components/User/UserStatusChip.spec.ts @@ -0,0 +1,91 @@ +import { describe, expect, it, beforeEach, afterEach } from "vitest"; +import { VueWrapper } from "@vue/test-utils"; +import { mountComponent } from "@tests/utils/mount"; +import UserStatusChip from "@admin/components/User/UserStatusChip.vue"; +import { UserStatus } from "@admin/interfaces/IUser"; + +describe("UserStatusChip", () => { + let wrapper: VueWrapper<InstanceType<typeof UserStatusChip>>; + + const mountWrapper = (status: UserStatus) => { + wrapper = mountComponent(UserStatusChip, { props: { status } }); + }; + + afterEach(() => { wrapper?.unmount(); }); + + describe("confirmed status", () => { + beforeEach(() => mountWrapper("confirmed")); + + it("renders chip with success color", () => { + const chip = wrapper.findComponent({ name: "VChip" }); + expect(chip.props("color")).toBe("success"); + }); + + it("displays confirmed icon", () => { + const chip = wrapper.findComponent({ name: "VChip" }); + expect(chip.props("prependIcon")).toBe("mdi-checkbox-marked-circle"); + }); + + it("displays confirmed label", () => { 
const chip = wrapper.findComponent({ name: "VChip" }); + expect(chip.text()).toBe("Confirmed"); + }); + }); + + describe("invited status", () => { + beforeEach(() => mountWrapper("invited")); + + it("renders chip with warning color", () => { + const chip = wrapper.findComponent({ name: "VChip" }); + expect(chip.props("color")).toBe("warning"); + }); + + it("displays invited icon", () => { + const chip = wrapper.findComponent({ name: "VChip" }); + expect(chip.props("prependIcon")).toBe("mdi-email-alert"); + }); + + it("displays invited label", () => { + const chip = wrapper.findComponent({ name: "VChip" }); + expect(chip.text()).toBe("Invited"); + }); + }); + + describe("not-confirmed status", () => { + beforeEach(() => mountWrapper("not-confirmed")); + + it("renders chip with error color", () => { + const chip = wrapper.findComponent({ name: "VChip" }); + expect(chip.props("color")).toBe("error"); + }); + + it("displays not-confirmed icon", () => { + const chip = wrapper.findComponent({ name: "VChip" }); + expect(chip.props("prependIcon")).toBe("mdi-alert-circle"); + }); + + it("displays not-confirmed label", () => { + const chip = wrapper.findComponent({ name: "VChip" }); + expect(chip.text()).toBe("Not Confirmed"); + }); + }); + + describe("invalid status fallback", () => { + beforeEach(() => mountWrapper("invalid-status" as UserStatus)); + + it("falls back to not-confirmed color for invalid status", () => { + const chip = wrapper.findComponent({ name: "VChip" }); + expect(chip.props("color")).toBe("error"); + }); + + it("falls back to not-confirmed icon for invalid status", () => { + const chip = wrapper.findComponent({ name: "VChip" }); + expect(chip.props("prependIcon")).toBe("mdi-alert-circle"); + }); + + it("falls back to not-confirmed label for invalid status", () => { + const chip = wrapper.findComponent({ name: "VChip" }); + expect(chip.text()).toBe("Not Confirmed"); + }); + }); +}); diff --git a/ui/admin/tests/unit/layouts/AppLayout.spec.ts 
b/ui/admin/tests/unit/layouts/AppLayout.spec.ts new file mode 100644 index 00000000000..81f0704b7d5 --- /dev/null +++ b/ui/admin/tests/unit/layouts/AppLayout.spec.ts @@ -0,0 +1,329 @@ +import { describe, expect, it, beforeEach, vi, afterEach } from "vitest"; +import { Router } from "vue-router"; +import { VueWrapper, flushPromises } from "@vue/test-utils"; +import { mountComponent } from "@tests/utils/mount"; +import { createCleanAdminRouter } from "@tests/utils/router"; +import useAuthStore from "@admin/store/modules/auth"; +import useLayoutStore from "@/store/modules/layout"; +import AppLayout from "@admin/layouts/AppLayout.vue"; +import { VApp } from "vuetify/components"; + +const Component = { template: "<v-app><AppLayout /></v-app>" }; + +// Mock window.location for router tests +Object.defineProperty(window, "location", { + value: { + href: "http://localhost:3000/admin/", + pathname: "/admin/", + search: "", + hash: "", + }, + writable: true, +}); + +describe("AppLayout", () => { + let wrapper: VueWrapper; + let router: Router; + let authStore: ReturnType<typeof useAuthStore>; + let layoutStore: ReturnType<typeof useLayoutStore>; + + const mountWrapper = (initialState = {}) => { + router = createCleanAdminRouter(); + + wrapper = mountComponent(Component, { + global: { + plugins: [router], + components: { AppLayout, "v-app": VApp }, + }, + piniaOptions: { + initialState: { + adminAuth: { + token: "dummy-token", + currentUser: "admin@example.com", + isAdmin: true, + }, + adminLicense: { license: { expired: false } }, + spinner: { status: false }, + layout: { theme: "dark" }, + ...initialState, + }, + }, + }); + + authStore = useAuthStore(); + layoutStore = useLayoutStore(); + }; + + afterEach(() => { + vi.clearAllMocks(); + wrapper?.unmount(); + }); + + describe("rendering when logged in", () => { + beforeEach(() => mountWrapper()); + + it("renders the navigation drawer", () => { + const drawer = wrapper.findComponent({ name: "VNavigationDrawer" }); + expect(drawer.exists()).toBe(true); + }); + + it("displays the logo in the drawer 
toolbar", () => { + const toolbar = wrapper.find('[data-test="drawer-toolbar"]'); + expect(toolbar.exists()).toBe(true); + expect(toolbar.findComponent({ name: "VImg" }).exists()).toBe(true); + }); + + it("renders the navigation list", () => { + const list = wrapper.find('[data-test="list"]'); + expect(list.exists()).toBe(true); + }); + + it("displays all menu items when license is not expired", () => { + const listItems = wrapper.findAll('[data-test="list-item"]'); + // Dashboard, Users, Devices, Sessions, Firewall Rules, Namespaces, Announcements (if enabled) + expect(listItems.length).toBeGreaterThan(6); + }); + + it("displays settings menu with children", () => { + const listGroups = wrapper.findAll('[data-test="list-group"]'); + expect(listGroups.length).toBe(1); // Settings has children + }); + + it("displays AppBarContent component", () => { + const appBarContent = wrapper.findComponent({ name: "AppBarContent" }); + expect(appBarContent.exists()).toBe(true); + }); + + it("passes correct props to AppBarContent", () => { + const appBarContent = wrapper.findComponent({ name: "AppBarContent" }); + expect(appBarContent.props("showMenuToggle")).toBe(true); + expect(appBarContent.props("showSupport")).toBe(true); + }); + + it("displays UserMenu component", () => { + const userMenu = wrapper.findComponent({ name: "UserMenu" }); + expect(userMenu.exists()).toBe(true); + }); + + it("passes user info to UserMenu", () => { + const userMenu = wrapper.findComponent({ name: "UserMenu" }); + expect(userMenu.props("userEmail")).toBe("admin@example.com"); + expect(userMenu.props("displayName")).toBe("admin@example.com"); + }); + + it("displays Namespace component with admin context", () => { + const namespace = wrapper.findComponent({ name: "Namespace" }); + expect(namespace.exists()).toBe(true); + expect(namespace.props("isAdminContext")).toBe(true); + }); + + it("renders main content area", () => { + const main = wrapper.find('[data-test="main"]'); + 
expect(main.exists()).toBe(true); + }); + + it("renders container for router view", () => { + const container = wrapper.find('[data-test="container"]'); + expect(container.exists()).toBe(true); + }); + }); + + describe("rendering when not logged in", () => { + beforeEach(() => { + mountWrapper({ + adminAuth: { + token: "", + currentUser: "", + isAdmin: false, + }, + }); + }); + + it("does not render navigation drawer when not logged in", () => { + const drawer = wrapper.findComponent({ name: "VNavigationDrawer" }); + expect(drawer.exists()).toBe(false); + }); + + it("still renders AppBarContent", () => { + const appBarContent = wrapper.findComponent({ name: "AppBarContent" }); + expect(appBarContent.exists()).toBe(true); + }); + + it("still renders main content area", () => { + const main = wrapper.find('[data-test="main"]'); + expect(main.exists()).toBe(true); + }); + }); + + describe("expired license behavior", () => { + beforeEach(() => { + mountWrapper({ + adminLicense: { + isExpired: true, + license: {}, + }, + }); + }); + + it("shows only Settings menu item when license is expired", () => { + const listItems = wrapper.findAll('[data-test="list-item"]'); + // Only Settings submenu items should be visible + expect(listItems.length).toBeLessThan(3); + }); + + it("shows only License submenu under Settings when license is expired", () => { + const licenseItem = wrapper.find('[data-test="License-listItem"]'); + expect(licenseItem.exists()).toBe(true); + }); + + it("does not show Authentication submenu when license is expired", () => { + const authItem = wrapper.find('[data-test="Authentication-listItem"]'); + expect(authItem.exists()).toBe(false); + }); + }); + + describe("spinner overlay", () => { + beforeEach(() => { + mountWrapper({ + spinner: { + status: true, + }, + }); + }); + + it("shows spinner overlay when spinner status is true", () => { + const overlay = wrapper.find('[data-test="overlay"]'); + expect(overlay.attributes("style")).not.toContain("display: 
none"); + }); + + it("displays progress circular in spinner overlay", () => { + const progressCircular = wrapper.find('[data-test="progress-circular"]'); + expect(progressCircular.exists()).toBe(true); + }); + }); + + describe("light mode", () => { + beforeEach(() => mountWrapper({ layout: { theme: "light" } })); + + it("toggles theme when UserMenu emits toggle-dark-mode", async () => { + const userMenu = wrapper.findComponent({ name: "UserMenu" }); + + await userMenu.vm.$emit("toggle-dark-mode"); + await flushPromises(); + + expect(layoutStore.setTheme).toHaveBeenCalled(); + }); + }); + + describe("user menu interactions", () => { + beforeEach(() => { + mountWrapper(); + }); + + it("navigates to license page when license menu item is selected", async () => { + const pushSpy = vi.spyOn(router, "push"); + const userMenu = wrapper.findComponent({ name: "UserMenu" }); + + const licenseMenuItem = { + icon: "mdi-license", + title: "License", + type: "path", + path: "/settings/license", + method: () => { }, + }; + + await userMenu.vm.$emit("select", licenseMenuItem); + await flushPromises(); + + expect(pushSpy).toHaveBeenCalledWith("/settings/license"); + }); + + it("calls logout when logout menu item is selected", async () => { + const userMenu = wrapper.findComponent({ name: "UserMenu" }); + + const logoutMenuItem = { + icon: "mdi-logout", + title: "Logout", + type: "method", + path: "", + method: vi.fn(() => { + authStore.logout(); + window.location.href = "/login"; + }), + }; + + await userMenu.vm.$emit("select", logoutMenuItem); + await flushPromises(); + + expect(logoutMenuItem.method).toHaveBeenCalled(); + }); + }); + + describe("support functionality", () => { + beforeEach(() => { + mountWrapper(); + }); + + it("opens ShellHub help page when support is clicked", async () => { + const windowOpenSpy = vi.spyOn(window, "open").mockImplementation(() => null); + const appBarContent = wrapper.findComponent({ name: "AppBarContent" }); + + await 
appBarContent.vm.$emit("support-click"); + await flushPromises(); + + expect(windowOpenSpy).toHaveBeenCalledWith( + "https://github.com/shellhub-io/shellhub/issues/new/choose", + "_blank", + ); + }); + }); + + describe("menu navigation", () => { + beforeEach(() => mountWrapper()); + + it("navigates to dashboard when logo is clicked", async () => { + const logoLink = wrapper.find('[data-test="drawer-toolbar"] a'); + + await logoLink.trigger("click"); + await flushPromises(); + + expect(logoLink.attributes("href")).toContain("/"); + }); + + it("displays menu items with correct icons", () => { + const icons = wrapper.findAll('[data-test="icon"]'); + expect(icons.length).toBeGreaterThan(0); + }); + }); + + describe("Settings menu behavior", () => { + beforeEach(() => mountWrapper()); + + it("shows both Authentication and License in Settings submenu", () => { + const authItem = wrapper.find('[data-test="Authentication-listItem"]'); + const licenseItem = wrapper.find('[data-test="License-listItem"]'); + + expect(authItem.exists()).toBe(true); + expect(licenseItem.exists()).toBe(true); + }); + + it("disables Settings menu when user is not admin", async () => { + wrapper.unmount(); + mountWrapper({ + adminAuth: { + token: "not-admin-token", + currentUser: "user@example.com", + isAdmin: false, + }, + }); + + await flushPromises(); + + const listGroup = wrapper.findComponent('[data-test="list-group"]'); + // When the user is not an admin the Settings item is the only one in the list group + const settingsListItem = listGroup.findComponent({ name: "VListItem" }); + + expect(settingsListItem.props("disabled")).toBe(true); + }); + }); +}); diff --git a/ui/admin/tests/unit/mocks/announcement.ts b/ui/admin/tests/unit/mocks/announcement.ts new file mode 100644 index 00000000000..da297d6fe89 --- /dev/null +++ b/ui/admin/tests/unit/mocks/announcement.ts @@ -0,0 +1,20 @@ +import { IAdminAnnouncement, IAdminAnnouncementShort } from "@admin/interfaces/IAnnouncement"; + +export const 
mockAnnouncement: IAdminAnnouncement = { + uuid: "announcement-123", + title: "Test Announcement", + content: "## ShellHub new features \n - New feature 1 \n - New feature 2", + date: "2026-01-14T08:00:00.000Z", +}; + +export const mockAnnouncementShort: IAdminAnnouncementShort = { + uuid: "announcement-123", + title: "Test Announcement", + date: "2026-01-14T08:00:00.000Z", +}; + +export const mockAnnouncements: IAdminAnnouncementShort[] = [ + { ...mockAnnouncementShort, uuid: "announcement-1", title: "Announcement One" }, + { ...mockAnnouncementShort, uuid: "announcement-2", title: "Announcement Two" }, + { ...mockAnnouncementShort, uuid: "announcement-3", title: "Announcement Three" }, +]; diff --git a/ui/admin/tests/unit/mocks/authSettings.ts b/ui/admin/tests/unit/mocks/authSettings.ts new file mode 100644 index 00000000000..281404a7f41 --- /dev/null +++ b/ui/admin/tests/unit/mocks/authSettings.ts @@ -0,0 +1,53 @@ +export const mockAuthSettings = { + local: { + enabled: true, + }, + saml: { + enabled: true, + auth_url: "https://auth.example.com/sso", + assertion_url: "http://localhost:3000/api/user/saml/auth", + idp: { + entity_id: "https://idp.example.com/entity", + binding: { + post: "https://idp.example.com/sso/post", + redirect: "https://idp.example.com/sso/redirect", + }, + certificates: ["-----BEGIN CERTIFICATE-----\nMIIDXTCCAkWgAwIBAgIJAKZQ..."], + mappings: { + email: "emailAddress", + name: "displayName", + }, + }, + sp: { + sign_auth_requests: true, + certificate: "-----BEGIN CERTIFICATE-----\nSP_CERT_CONTENT\n-----END CERTIFICATE-----", + }, + }, +}; + +export const mockAuthSettingsLocalOnly = { + local: { + enabled: true, + }, + saml: { + enabled: false, + auth_url: "", + assertion_url: "", + idp: { + entity_id: "", + binding: { + post: "", + redirect: "", + }, + certificates: [], + mappings: { + email: "", + name: "", + }, + }, + sp: { + sign_auth_requests: false, + certificate: "", + }, + }, +}; diff --git a/ui/admin/tests/unit/mocks/device.ts 
b/ui/admin/tests/unit/mocks/device.ts new file mode 100644 index 00000000000..b1f0cac1ae6 --- /dev/null +++ b/ui/admin/tests/unit/mocks/device.ts @@ -0,0 +1,36 @@ +import { IAdminDevice } from "@admin/interfaces/IDevice"; + +export const mockDevice: IAdminDevice = { + uid: "device-123", + name: "test-device", + identity: { + mac: "00:11:22:33:44:55", + }, + info: { + id: "device-123", + pretty_name: "Ubuntu 22.04", + version: "v1.0.0", + arch: "x86_64", + platform: "linux", + }, + public_key: "ssh-rsa AAAA...", + tenant_id: "tenant-123", + last_seen: "2024-01-10T12:00:00Z", + online: true, + namespace: "default", + status: "accepted", + status_updated_at: "2024-01-01T00:00:00Z", + created_at: "2024-01-01T00:00:00Z", + remote_addr: "192.168.1.100", + position: { + latitude: 0, + longitude: 0, + }, + tags: [], +}; + +export const mockDevices: IAdminDevice[] = [ + { ...mockDevice, uid: "device-1", name: "device-one" }, + { ...mockDevice, uid: "device-2", name: "device-two", online: false }, + { ...mockDevice, uid: "device-3", name: "device-three" }, +]; diff --git a/ui/admin/tests/unit/mocks/firewallRule.ts b/ui/admin/tests/unit/mocks/firewallRule.ts new file mode 100644 index 00000000000..17f74d35777 --- /dev/null +++ b/ui/admin/tests/unit/mocks/firewallRule.ts @@ -0,0 +1,20 @@ +import { IAdminFirewallRule } from "@admin/interfaces/IFirewallRule"; + +export const mockFirewallRule: IAdminFirewallRule = { + id: "rule-123", + tenant_id: "tenant-123", + priority: 1, + action: "allow" as const, + active: true, + source_ip: "192.168.1.0/24", + username: "testuser", + filter: { + hostname: ".*", + }, +}; + +export const mockFirewallRules: IAdminFirewallRule[] = [ + { ...mockFirewallRule, id: "rule-1", priority: 1, action: "allow" as const }, + { ...mockFirewallRule, id: "rule-2", priority: 2, action: "deny" as const, active: false }, + { ...mockFirewallRule, id: "rule-3", priority: 3, action: "allow" as const }, +]; diff --git a/ui/admin/tests/unit/mocks/index.ts 
b/ui/admin/tests/unit/mocks/index.ts new file mode 100644 index 00000000000..7436a9acdbb --- /dev/null +++ b/ui/admin/tests/unit/mocks/index.ts @@ -0,0 +1,9 @@ +export * from "./user"; +export * from "./device"; +export * from "./session"; +export * from "./namespace"; +export * from "./announcement"; +export * from "./firewallRule"; +export * from "./stats"; +export * from "./authSettings"; +export * from "./license"; diff --git a/ui/admin/tests/unit/mocks/license.ts b/ui/admin/tests/unit/mocks/license.ts new file mode 100644 index 00000000000..50dbe9197ac --- /dev/null +++ b/ui/admin/tests/unit/mocks/license.ts @@ -0,0 +1,51 @@ +import { IAdminLicense } from "@admin/interfaces/ILicense"; + +export const mockLicense: IAdminLicense = { + id: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + expired: false, + about_to_expire: false, + grace_period: false, + issued_at: 1704067200, // Jan 1, 2024 + starts_at: 1704067200, + expires_at: 1735689600, // Jan 1, 2025 + allowed_regions: [], + customer: { + id: "customer-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + name: "Test Customer", + email: "test@example.com", + company: "Test Company Inc.", + }, + features: { + devices: -1, + session_recording: true, + firewall_rules: true, + billing: false, + reports: false, + login_link: false, + }, +}; + +export const mockLicenseExpired: IAdminLicense = { + ...mockLicense, + expired: true, + grace_period: false, + expires_at: 1672531200, // Jan 1, 2023 (past date) +}; + +export const mockLicenseAboutToExpire: IAdminLicense = { + ...mockLicense, + about_to_expire: true, +}; + +export const mockLicenseGracePeriod: IAdminLicense = { + ...mockLicense, + expired: true, + grace_period: true, +}; + +export const mockLicenseRegional: IAdminLicense = { + ...mockLicense, + allowed_regions: ["US", "EU"] as IAdminLicense["allowed_regions"], +}; + +export const mockNoLicense: Partial<IAdminLicense> = {}; diff --git a/ui/admin/tests/unit/mocks/namespace.ts b/ui/admin/tests/unit/mocks/namespace.ts new file mode 100644 index 
00000000000..6b9f831655d --- /dev/null +++ b/ui/admin/tests/unit/mocks/namespace.ts @@ -0,0 +1,37 @@ +import { IAdminNamespace } from "@admin/interfaces/INamespace"; + +export const mockNamespace: IAdminNamespace = { + name: "test-namespace", + owner: "owner-123", + tenant_id: "tenant-123", + members: [ + { + id: "user-1", + role: "owner" as const, + email: "alice@example.com", + added_at: "2024-01-01T00:00:00Z", + expires_at: "0001-01-01T00:00:00Z", + }, + { + id: "user-2", + role: "observer" as const, + email: "bob@example.com", + added_at: "2024-01-01T00:00:00Z", + expires_at: "0001-01-01T00:00:00Z", + }, + ], + max_devices: 10, + devices_accepted_count: 3, + devices_pending_count: 0, + devices_rejected_count: 0, + created_at: "2024-01-01T00:00:00Z", + billing: undefined, + settings: { session_record: true, connection_announcement: "Welcome!" }, + type: "team" as const, +}; + +export const mockNamespaces: IAdminNamespace[] = [ + { ...mockNamespace, name: "namespace-one", tenant_id: "tenant-1" }, + { ...mockNamespace, name: "namespace-two", tenant_id: "tenant-2", devices_accepted_count: 5 }, + { ...mockNamespace, name: "namespace-three", tenant_id: "tenant-3", devices_accepted_count: 0 }, +]; diff --git a/ui/admin/tests/unit/mocks/session.ts b/ui/admin/tests/unit/mocks/session.ts new file mode 100644 index 00000000000..425dbdfeba5 --- /dev/null +++ b/ui/admin/tests/unit/mocks/session.ts @@ -0,0 +1,48 @@ +import { IAdminSession } from "@admin/interfaces/ISession"; + +export const mockSession: IAdminSession = { + uid: "session-123", + device_uid: "device-123", + device: { + uid: "device-123", + name: "test-device", + identity: { + mac: "00:11:22:33:44:55", + }, + info: { + id: "device-123", + pretty_name: "Ubuntu 22.04", + version: "v1.0.0", + arch: "x86_64", + platform: "linux", + }, + public_key: "ssh-rsa AAAA...", + tenant_id: "tenant-123", + last_seen: "2024-01-10T12:00:00Z", + online: true, + namespace: "default", + status: "accepted" as const, + created_at: 
"2024-01-01T00:00:00Z", + status_updated_at: "2024-01-01T00:00:00Z", + remote_addr: "192.168.1.100", + position: { latitude: 0, longitude: 0 }, + tags: [], + }, + tenant_id: "tenant-123", + username: "testuser", + ip_address: "192.168.1.100", + started_at: "2024-01-10T12:00:00Z", + last_seen: "2024-01-10T12:30:00Z", + active: true, + authenticated: true, + recorded: false, + type: "ssh", + position: { latitude: 0, longitude: 0 }, + term: "xterm-256color", +}; + +export const mockSessions: IAdminSession[] = [ + { ...mockSession, uid: "session-1", username: "alice" }, + { ...mockSession, uid: "session-2", username: "bob", active: false, authenticated: false }, + { ...mockSession, uid: "session-3", username: "charlie" }, +]; diff --git a/ui/admin/tests/unit/mocks/stats.ts b/ui/admin/tests/unit/mocks/stats.ts new file mode 100644 index 00000000000..44cfa989f9e --- /dev/null +++ b/ui/admin/tests/unit/mocks/stats.ts @@ -0,0 +1,10 @@ +import { IAdminStats } from "@admin/interfaces/IStats"; + +export const mockStats: IAdminStats = { + registered_users: 10, + registered_devices: 25, + online_devices: 15, + active_sessions: 5, + pending_devices: 2, + rejected_devices: 1, +}; diff --git a/ui/admin/tests/unit/mocks/user.ts b/ui/admin/tests/unit/mocks/user.ts new file mode 100644 index 00000000000..e8b79e653b6 --- /dev/null +++ b/ui/admin/tests/unit/mocks/user.ts @@ -0,0 +1,40 @@ +import { IAdminUser } from "@admin/interfaces/IUser"; + +export const mockUser: IAdminUser = { + id: "user-123", + username: "testuser", + name: "testuser", + email: "test@example.com", + created_at: "2024-01-01T00:00:00Z", + last_login: "2024-01-10T12:00:00Z", + email_marketing: false, + status: "confirmed", + max_namespaces: 3, + namespacesOwned: 1, + admin: true, + preferences: { + auth_methods: ["local"], + }, +}; + +export const mockUsers: IAdminUser[] = [ + { ...mockUser, id: "user-1", username: "alice", email: "alice@example.com" }, + { ...mockUser, id: "user-2", username: "bob", email: 
"bob@example.com" }, + { ...mockUser, id: "user-3", username: "charlie", email: "charlie@example.com" }, +]; + +export const mockNotConfirmedUser: IAdminUser = { + ...mockUser, + id: "user-not-confirmed", + username: "notconfirmed", + status: "not-confirmed", + admin: false, +}; + +export const mockInvitedUser: IAdminUser = { + ...mockUser, + id: "user-invited", + username: "invited", + status: "invited", + admin: false, +}; diff --git a/ui/admin/tests/unit/store/modules/announcements.spec.ts b/ui/admin/tests/unit/store/modules/announcements.spec.ts new file mode 100644 index 00000000000..945f06c9383 --- /dev/null +++ b/ui/admin/tests/unit/store/modules/announcements.spec.ts @@ -0,0 +1,207 @@ +import { createPinia, setActivePinia } from "pinia"; +import MockAdapter from "axios-mock-adapter"; +import { describe, expect, it, beforeEach, afterEach } from "vitest"; +import { adminApi } from "@/api/http"; +import useAnnouncementStore from "@admin/store/modules/announcement"; +import { IAdminAnnouncement, IAdminAnnouncementShort, IAdminAnnouncementRequestBody } from "@admin/interfaces/IAnnouncement"; +import { buildUrl } from "@tests/utils/url"; + +const mockAnnouncementShort: IAdminAnnouncementShort = { + uuid: "52088548-2b99-4f38-ac09-3a8f8988476f", + title: "This is an announcement", + date: "2026-01-06T10:00:00.000Z", +}; + +const mockAnnouncement: IAdminAnnouncement = { + ...mockAnnouncementShort, + content: "## ShellHub new features \n - New feature 1 \n - New feature 2 \n - New feature 3", +}; + +const mockAnnouncementRequestBody: IAdminAnnouncementRequestBody = { + title: "New announcement", + content: "## Content here", +}; + +describe("Admin Announcement Store", () => { + let announcementStore: ReturnType<typeof useAnnouncementStore>; + let mockAdminApi: MockAdapter; + + beforeEach(() => { + setActivePinia(createPinia()); + announcementStore = useAnnouncementStore(); + mockAdminApi = new MockAdapter(adminApi.getAxios()); + }); + + afterEach(() => { mockAdminApi.reset(); }); + 
describe("Initial State", () => { + it("should have empty announcements array", () => { + expect(announcementStore.announcements).toEqual([]); + }); + + it("should have empty announcement object", () => { + expect(announcementStore.announcement).toEqual({}); + }); + + it("should have zero announcement count", () => { + expect(announcementStore.announcementCount).toBe(0); + }); + }); + + describe("createAnnouncement", () => { + const baseUrl = "http://localhost:3000/admin/api/announcements"; + + it("should create announcement successfully and update state", async () => { + mockAdminApi.onPost(baseUrl, mockAnnouncementRequestBody).reply(201, mockAnnouncement); + + await expect(announcementStore.createAnnouncement(mockAnnouncementRequestBody)).resolves.not.toThrow(); + + expect(announcementStore.announcement).toEqual(mockAnnouncement); + }); + + it("should throw on server error when creating announcement", async () => { + mockAdminApi.onPost(baseUrl, mockAnnouncementRequestBody).reply(500); + + await expect(announcementStore.createAnnouncement(mockAnnouncementRequestBody)).rejects.toBeAxiosErrorWithStatus(500); + }); + + it("should throw on network error when creating announcement", async () => { + mockAdminApi.onPost(baseUrl, mockAnnouncementRequestBody).networkError(); + + await expect(announcementStore.createAnnouncement(mockAnnouncementRequestBody)).rejects.toThrow("Network Error"); + }); + }); + + describe("updateAnnouncement", () => { + const uuid = "52088548-2b99-4f38-ac09-3a8f8988476f"; + const baseUrl = `http://localhost:3000/admin/api/announcements/${uuid}`; + + it("should update announcement successfully", async () => { + mockAdminApi.onPut(baseUrl, mockAnnouncementRequestBody).reply(200); + + await expect(announcementStore.updateAnnouncement(uuid, mockAnnouncementRequestBody)).resolves.not.toThrow(); + }); + + it("should throw on not found error when updating announcement", async () => { + mockAdminApi.onPut(baseUrl, mockAnnouncementRequestBody).reply(404, 
{ message: "Announcement not found" }); + + await expect(announcementStore.updateAnnouncement(uuid, mockAnnouncementRequestBody)).rejects.toBeAxiosErrorWithStatus(404); + }); + + it("should throw on network error when updating announcement", async () => { + mockAdminApi.onPut(baseUrl, mockAnnouncementRequestBody).networkError(); + + await expect(announcementStore.updateAnnouncement(uuid, mockAnnouncementRequestBody)).rejects.toThrow("Network Error"); + }); + }); + + describe("fetchAnnouncement", () => { + const uuid = "52088548-2b99-4f38-ac09-3a8f8988476f"; + const baseUrl = `http://localhost:3000/admin/api/announcements/${uuid}`; + + it("should fetch announcement successfully and update state", async () => { + mockAdminApi.onGet(baseUrl).reply(200, mockAnnouncement); + + await expect(announcementStore.fetchAnnouncement(uuid)).resolves.not.toThrow(); + + expect(announcementStore.announcement).toEqual(mockAnnouncement); + }); + + it("should throw on not found error when fetching announcement", async () => { + mockAdminApi.onGet(baseUrl).reply(404, { message: "Announcement not found" }); + + await expect(announcementStore.fetchAnnouncement(uuid)).rejects.toBeAxiosErrorWithStatus(404); + }); + + it("should throw on network error when fetching announcement", async () => { + mockAdminApi.onGet(baseUrl).networkError(); + + await expect(announcementStore.fetchAnnouncement(uuid)).rejects.toThrow("Network Error"); + }); + }); + + describe("fetchAnnouncementList", () => { + const baseUrl = "http://localhost:3000/admin/api/announcements"; + + it("should fetch announcements list successfully with asc ordering", async () => { + const announcementsList = [mockAnnouncementShort]; + + mockAdminApi + .onGet(buildUrl(baseUrl, { page: "1", per_page: "10", order_by: "asc" })) + .reply(200, announcementsList, { "x-total-count": "1" }); + + await expect(announcementStore.fetchAnnouncementList({ page: 1, perPage: 10, orderBy: "asc" })).resolves.not.toThrow(); + + 
expect(announcementStore.announcements).toEqual(announcementsList); + expect(announcementStore.announcementCount).toBe(1); + }); + + it("should fetch announcements list successfully with desc ordering", async () => { + const announcementsList = [ + mockAnnouncementShort, + { ...mockAnnouncementShort, uuid: "another-uuid", title: "Another announcement" }, + ]; + + mockAdminApi + .onGet(buildUrl(baseUrl, { page: "1", per_page: "10", order_by: "desc" })) + .reply(200, announcementsList, { "x-total-count": "2" }); + + await expect(announcementStore.fetchAnnouncementList({ page: 1, perPage: 10, orderBy: "desc" })).resolves.not.toThrow(); + + expect(announcementStore.announcements).toEqual(announcementsList); + expect(announcementStore.announcementCount).toBe(2); + }); + + it("should fetch empty announcements list successfully", async () => { + mockAdminApi + .onGet(buildUrl(baseUrl, { page: "1", per_page: "10", order_by: "asc" })) + .reply(200, [], { "x-total-count": "0" }); + + await expect(announcementStore.fetchAnnouncementList({ page: 1, perPage: 10, orderBy: "asc" })).resolves.not.toThrow(); + + expect(announcementStore.announcements).toEqual([]); + expect(announcementStore.announcementCount).toBe(0); + }); + + it("should throw on server error when fetching announcements list", async () => { + mockAdminApi + .onGet(buildUrl(baseUrl, { page: "1", per_page: "10", order_by: "asc" })) + .reply(500); + + await expect(announcementStore.fetchAnnouncementList({ page: 1, perPage: 10, orderBy: "asc" })).rejects.toBeAxiosErrorWithStatus(500); + }); + + it("should throw on network error when fetching announcements list", async () => { + mockAdminApi + .onGet(buildUrl(baseUrl, { page: "1", per_page: "10", order_by: "asc" })) + .networkError(); + + await expect(announcementStore.fetchAnnouncementList({ page: 1, perPage: 10, orderBy: "asc" })).rejects.toThrow("Network Error"); + }); + }); + + describe("deleteAnnouncement", () => { + const uuid = 
"52088548-2b99-4f38-ac09-3a8f8988476f"; + const baseUrl = `http://localhost:3000/admin/api/announcements/${uuid}`; + + it("should delete announcement successfully and update state", async () => { + mockAdminApi.onDelete(baseUrl).reply(200, mockAnnouncement); + + await expect(announcementStore.deleteAnnouncement(uuid)).resolves.not.toThrow(); + + expect(announcementStore.announcement).toEqual(mockAnnouncement); + }); + + it("should throw on forbidden error when deleting announcement", async () => { + mockAdminApi.onDelete(baseUrl).reply(403, { message: "Forbidden" }); + + await expect(announcementStore.deleteAnnouncement(uuid)).rejects.toBeAxiosErrorWithStatus(403); + }); + + it("should throw on network error when deleting announcement", async () => { + mockAdminApi.onDelete(baseUrl).networkError(); + + await expect(announcementStore.deleteAnnouncement(uuid)).rejects.toThrow("Network Error"); + }); + }); +}); diff --git a/ui/admin/tests/unit/store/modules/auth.spec.ts b/ui/admin/tests/unit/store/modules/auth.spec.ts new file mode 100644 index 00000000000..153bde90b49 --- /dev/null +++ b/ui/admin/tests/unit/store/modules/auth.spec.ts @@ -0,0 +1,113 @@ +import { createPinia, setActivePinia } from "pinia"; +import MockAdapter from "axios-mock-adapter"; +import { describe, expect, it, beforeEach, afterEach } from "vitest"; +import { adminApi } from "@/api/http"; +import useAuthStore from "@admin/store/modules/auth"; + +describe("Admin Auth Store", () => { + let authStore: ReturnType<typeof useAuthStore>; + let mockAdminApi: MockAdapter; + + beforeEach(() => { + setActivePinia(createPinia()); + mockAdminApi = new MockAdapter(adminApi.getAxios()); + localStorage.clear(); + authStore = useAuthStore(); + }); + + afterEach(() => { + mockAdminApi.reset(); + localStorage.clear(); + }); + + describe("Initial State", () => { + it("should have empty status", () => { + expect(authStore.status).toBe(""); + }); + + it("should have empty token", () => { + expect(authStore.token).toBe(""); + }); + 
it("should have empty current user", () => { + expect(authStore.currentUser).toBe(""); + }); + + it("should have isAdmin as false", () => { + expect(authStore.isAdmin).toBe(false); + }); + + it("should have isLoggedIn as false when no token", () => { + expect(authStore.isLoggedIn).toBe(false); + }); + + it("should load token from localStorage if present", () => { + localStorage.setItem("token", "stored-token"); + localStorage.setItem("user", "stored-user"); + localStorage.setItem("admin", "true"); + + setActivePinia(createPinia()); + const newStore = useAuthStore(); + + expect(newStore.token).toBe("stored-token"); + expect(newStore.currentUser).toBe("stored-user"); + expect(newStore.isAdmin).toBe(true); + expect(newStore.isLoggedIn).toBe(true); + }); + }); + + describe("getLoginToken", () => { + const baseUrl = "http://localhost:3000/admin/api/auth/token"; + + it("should fetch token successfully and return it", async () => { + const userId = "user-123"; + const mockResponse = { + token: "generated-token-abc123", + }; + + mockAdminApi.onGet(`${baseUrl}/${userId}`).reply(200, mockResponse); + + const result = await authStore.getLoginToken(userId); + + expect(result).toBe("generated-token-abc123"); + expect(authStore.status).toBe(""); + }); + + it("should set status to error on 500 and throw", async () => { + const userId = "user-123"; + + mockAdminApi.onGet(`${baseUrl}/${userId}`).reply(500); + + await expect(authStore.getLoginToken(userId)).rejects.toBeAxiosErrorWithStatus(500); + expect(authStore.status).toBe("error"); + }); + + it("should set status to error on network error and throw", async () => { + const userId = "user-123"; + + mockAdminApi.onGet(`${baseUrl}/${userId}`).networkError(); + + await expect(authStore.getLoginToken(userId)).rejects.toThrow("Network Error"); + expect(authStore.status).toBe("error"); + }); + }); + + describe("logout", () => { + it("should clear all state and localStorage", () => { + authStore.status = "success"; + authStore.token = 
"test-token"; + authStore.currentUser = "test-user"; + localStorage.setItem("token", "test-token"); + localStorage.setItem("user", "test-user"); + + authStore.logout(); + + expect(authStore.status).toBe(""); + expect(authStore.token).toBe(""); + expect(authStore.currentUser).toBe(""); + expect(authStore.isLoggedIn).toBe(false); + expect(localStorage.getItem("token")).toBeNull(); + expect(localStorage.getItem("user")).toBeNull(); + }); + }); +}); diff --git a/ui/admin/tests/unit/store/modules/devices.spec.ts b/ui/admin/tests/unit/store/modules/devices.spec.ts new file mode 100644 index 00000000000..43b9215c28e --- /dev/null +++ b/ui/admin/tests/unit/store/modules/devices.spec.ts @@ -0,0 +1,233 @@ +import { createPinia, setActivePinia } from "pinia"; +import MockAdapter from "axios-mock-adapter"; +import { describe, expect, it, beforeEach, afterEach } from "vitest"; +import { adminApi } from "@/api/http"; +import useDevicesStore from "@admin/store/modules/devices"; +import { IAdminDevice } from "@admin/interfaces/IDevice"; +import { buildUrl } from "@tests/utils/url"; + +const mockDeviceBase: IAdminDevice = { + uid: "device-uid-123", + name: "admin-device", + identity: { + mac: "00:1A:2B:3C:4D:5E", + }, + info: { + id: "debian", + pretty_name: "Debian GNU/Linux 11", + version: "11", + arch: "x86_64", + platform: "docker", + }, + public_key: "ssh-rsa AAAAB3NzaC1...", + tenant_id: "tenant-id-789", + last_seen: "2026-01-01T12:00:00.000Z", + status_updated_at: "2026-01-01T12:00:00.000Z", + online: true, + namespace: "admin-namespace", + status: "accepted", + created_at: "2026-01-01T00:00:00.000Z", + remote_addr: "192.168.1.100", + position: { latitude: 0, longitude: 0 }, + tags: [{ + name: "admin", + tenant_id: "tenant-id-789", + created_at: "2026-01-01T00:00:00.000Z", + updated_at: "2026-01-01T00:00:00.000Z", + }], +}; + +describe("Admin Devices Store", () => { + let devicesStore: ReturnType<typeof useDevicesStore>; + let mockAdminApi: MockAdapter; + + beforeEach(() => { + 
setActivePinia(createPinia()); + devicesStore = useDevicesStore(); + mockAdminApi = new MockAdapter(adminApi.getAxios()); + }); + + afterEach(() => { mockAdminApi.reset(); }); + + describe("Initial State", () => { + it("should have empty devices array", () => { + expect(devicesStore.devices).toEqual([]); + }); + + it("should have zero device count", () => { + expect(devicesStore.deviceCount).toBe(0); + }); + + it("should have empty device object", () => { + expect(devicesStore.device).toEqual({}); + }); + + it("should have empty current filter", () => { + expect(devicesStore.currentFilter).toBe(""); + }); + + it("should have undefined current sort field", () => { + expect(devicesStore.currentSortField).toBeUndefined(); + }); + + it("should have undefined current sort order", () => { + expect(devicesStore.currentSortOrder).toBeUndefined(); + }); + }); + + describe("setFilter", () => { + it("should set filter value", () => { + devicesStore.setFilter("status:accepted"); + expect(devicesStore.currentFilter).toBe("status:accepted"); + }); + + it("should set empty string when filter is empty", () => { + devicesStore.setFilter(""); + expect(devicesStore.currentFilter).toBe(""); + }); + }); + + describe("setSort", () => { + it("should set sort field and order", () => { + devicesStore.setSort("name", "asc"); + expect(devicesStore.currentSortField).toBe("name"); + expect(devicesStore.currentSortOrder).toBe("asc"); + }); + + it("should set undefined sort field and order when not provided", () => { + devicesStore.setSort(); + expect(devicesStore.currentSortField).toBeUndefined(); + expect(devicesStore.currentSortOrder).toBeUndefined(); + }); + }); + + describe("fetchDeviceList", () => { + const baseUrl = "http://localhost:3000/admin/api/devices"; + + it("should fetch devices list successfully with default pagination", async () => { + const devicesList = [mockDeviceBase]; + + mockAdminApi + .onGet(buildUrl(baseUrl, { filter: "", page: "1", per_page: "10" })) + .reply(200, 
devicesList, { "x-total-count": "1" }); + + await expect(devicesStore.fetchDeviceList()).resolves.not.toThrow(); + + expect(devicesStore.devices).toEqual(devicesList); + expect(devicesStore.deviceCount).toBe(1); + }); + + it("should fetch devices list successfully with custom pagination", async () => { + const devicesList = [mockDeviceBase]; + + mockAdminApi + .onGet(buildUrl(baseUrl, { filter: "", page: "2", per_page: "20" })) + .reply(200, devicesList, { "x-total-count": "1" }); + + await expect(devicesStore.fetchDeviceList({ page: 2, perPage: 20 })).resolves.not.toThrow(); + + expect(devicesStore.devices).toEqual(devicesList); + expect(devicesStore.deviceCount).toBe(1); + }); + + it("should fetch devices list with filter successfully", async () => { + const devicesList = [mockDeviceBase]; + const filter = "test"; + + mockAdminApi + .onGet(buildUrl(baseUrl, { filter, page: "1", per_page: "10" })) + .reply(200, devicesList, { "x-total-count": "1" }); + + await expect(devicesStore.fetchDeviceList({ filter })).resolves.not.toThrow(); + + expect(devicesStore.devices).toEqual(devicesList); + expect(devicesStore.deviceCount).toBe(1); + }); + + it("should fetch devices list with sort successfully", async () => { + const devicesList = [mockDeviceBase]; + + mockAdminApi + .onGet(buildUrl(baseUrl, { filter: "", page: "1", per_page: "10", sort_by: "name", order_by: "asc" })) + .reply(200, devicesList, { "x-total-count": "1" }); + + await expect(devicesStore.fetchDeviceList({ sortField: "name", sortOrder: "asc" })).resolves.not.toThrow(); + + expect(devicesStore.devices).toEqual(devicesList); + expect(devicesStore.deviceCount).toBe(1); + }); + + it("should use current filter and sort when not provided in parameters", async () => { + devicesStore.setFilter("old_filter"); + devicesStore.setSort("created_at", "desc"); + + const devicesList = [mockDeviceBase]; + + mockAdminApi + .onGet(buildUrl(baseUrl, { filter: "old_filter", page: "1", per_page: "10", sort_by: "created_at", 
order_by: "desc" })) + .reply(200, devicesList, { "x-total-count": "1" }); + + await expect(devicesStore.fetchDeviceList()).resolves.not.toThrow(); + + expect(devicesStore.devices).toEqual(devicesList); + expect(devicesStore.deviceCount).toBe(1); + }); + + it("should fetch empty devices list successfully", async () => { + mockAdminApi + .onGet(buildUrl(baseUrl, { filter: "", page: "1", per_page: "10" })) + .reply(200, [], { "x-total-count": "0" }); + + await expect(devicesStore.fetchDeviceList()).resolves.not.toThrow(); + + expect(devicesStore.devices).toEqual([]); + expect(devicesStore.deviceCount).toBe(0); + }); + + it("should throw on server error when fetching devices list", async () => { + mockAdminApi + .onGet(buildUrl(baseUrl, { filter: "", page: "1", per_page: "10" })) + .reply(500); + + await expect(devicesStore.fetchDeviceList()).rejects.toBeAxiosErrorWithStatus(500); + }); + + it("should throw on network error when fetching devices list", async () => { + mockAdminApi + .onGet(buildUrl(baseUrl, { filter: "", page: "1", per_page: "10" })) + .networkError(); + + await expect(devicesStore.fetchDeviceList()).rejects.toThrow("Network Error"); + }); + }); + + describe("fetchDeviceById", () => { + const baseGetDeviceUrl = (deviceUid: string) => `http://localhost:3000/admin/api/devices/${deviceUid}`; + + it("should fetch device by id successfully and return data", async () => { + const deviceUid = "device-uid-123"; + + mockAdminApi.onGet(baseGetDeviceUrl(deviceUid)).reply(200, mockDeviceBase); + + await devicesStore.fetchDeviceById(deviceUid); + + expect(devicesStore.device).toEqual(mockDeviceBase); + }); + + it("should throw on not found error when fetching device by id", async () => { + const deviceUid = "non-existent-device"; + + mockAdminApi.onGet(baseGetDeviceUrl(deviceUid)).reply(404, { message: "Device not found" }); + + await expect(devicesStore.fetchDeviceById(deviceUid)).rejects.toBeAxiosErrorWithStatus(404); + }); + + it("should throw on network error 
when fetching device by id", async () => { + const deviceUid = "device-uid-123"; + + mockAdminApi.onGet(baseGetDeviceUrl(deviceUid)).networkError(); + + await expect(devicesStore.fetchDeviceById(deviceUid)).rejects.toThrow("Network Error"); + }); + }); +}); diff --git a/ui/admin/tests/unit/store/modules/firewall_rules.spec.ts b/ui/admin/tests/unit/store/modules/firewall_rules.spec.ts new file mode 100644 index 00000000000..2d74f2b7ca9 --- /dev/null +++ b/ui/admin/tests/unit/store/modules/firewall_rules.spec.ts @@ -0,0 +1,124 @@ +import { createPinia, setActivePinia } from "pinia"; +import MockAdapter from "axios-mock-adapter"; +import { describe, expect, it, beforeEach, afterEach } from "vitest"; +import { adminApi } from "@/api/http"; +import useFirewallRulesStore from "@admin/store/modules/firewall_rules"; +import { IAdminFirewallRule } from "@admin/interfaces/IFirewallRule"; +import { buildUrl } from "@tests/utils/url"; + +const mockFirewallRuleBase: IAdminFirewallRule = { + id: "5f1996c84d2190a22d5857bb", + tenant_id: "tenant-id-123", + priority: 4, + action: "allow", + active: true, + source_ip: "192.168.1.100", + username: "admin", + filter: { hostname: "admin-server", tags: [] }, +}; + +describe("Admin Firewall Rules Store", () => { + let firewallRulesStore: ReturnType<typeof useFirewallRulesStore>; + let mockAdminApi: MockAdapter; + + beforeEach(() => { + setActivePinia(createPinia()); + firewallRulesStore = useFirewallRulesStore(); + mockAdminApi = new MockAdapter(adminApi.getAxios()); + }); + + afterEach(() => { mockAdminApi.reset(); }); + + describe("Initial State", () => { + it("should have empty firewall rules array", () => { + expect(firewallRulesStore.firewallRules).toEqual([]); + }); + + it("should have zero firewall rules count", () => { + expect(firewallRulesStore.firewallRulesCount).toBe(0); + }); + + it("should have empty firewall rule object", () => { + expect(firewallRulesStore.firewallRule).toEqual({}); + }); + }); + + describe("fetchFirewallRulesList", () => { + const 
baseUrl = "http://localhost:3000/admin/api/firewall/rules"; + + it("should fetch firewall rules list successfully with default pagination", async () => { + const rulesList = [mockFirewallRuleBase]; + + mockAdminApi.onGet(buildUrl(baseUrl, { page: "1", per_page: "10" })).reply(200, rulesList, { "x-total-count": "1" }); + + await expect(firewallRulesStore.fetchFirewallRulesList()).resolves.not.toThrow(); + + expect(firewallRulesStore.firewallRules).toEqual(rulesList); + expect(firewallRulesStore.firewallRulesCount).toBe(1); + }); + + it("should fetch firewall rules list successfully with custom pagination", async () => { + const rulesList = [ + mockFirewallRuleBase, + { ...mockFirewallRuleBase, id: "5f1996c84d2190a22d5857cc", priority: 3 }, + ]; + + mockAdminApi.onGet(buildUrl(baseUrl, { page: "2", per_page: "20" })).reply(200, rulesList, { "x-total-count": "2" }); + + await expect(firewallRulesStore.fetchFirewallRulesList({ page: 2, perPage: 20 })).resolves.not.toThrow(); + + expect(firewallRulesStore.firewallRules).toEqual(rulesList); + expect(firewallRulesStore.firewallRulesCount).toBe(2); + }); + + it("should fetch empty firewall rules list successfully", async () => { + mockAdminApi.onGet(buildUrl(baseUrl, { page: "1", per_page: "10" })).reply(200, [], { "x-total-count": "0" }); + + await expect(firewallRulesStore.fetchFirewallRulesList()).resolves.not.toThrow(); + + expect(firewallRulesStore.firewallRules).toEqual([]); + expect(firewallRulesStore.firewallRulesCount).toBe(0); + }); + + it("should throw on server error when fetching firewall rules list", async () => { + mockAdminApi.onGet(buildUrl(baseUrl, { page: "1", per_page: "10" })).reply(500); + + await expect(firewallRulesStore.fetchFirewallRulesList()).rejects.toBeAxiosErrorWithStatus(500); + }); + + it("should throw on network error when fetching firewall rules list", async () => { + mockAdminApi.onGet(buildUrl(baseUrl, { page: "1", per_page: "10" })).networkError(); + + await 
expect(firewallRulesStore.fetchFirewallRulesList()).rejects.toThrow("Network Error"); + }); + }); + + describe("fetchFirewallRuleById", () => { + const generateGetRuleUrl = (ruleId: string) => `http://localhost:3000/admin/api/firewall/rules/${ruleId}`; + + it("should fetch firewall rule by id successfully and return data", async () => { + const ruleId = "5f1996c84d2190a22d5857bb"; + + mockAdminApi.onGet(generateGetRuleUrl(ruleId)).reply(200, mockFirewallRuleBase); + + await expect(firewallRulesStore.fetchFirewallRuleById(ruleId)).resolves.not.toThrow(); + expect(firewallRulesStore.firewallRule).toEqual(mockFirewallRuleBase); + }); + + it("should throw on not found error when fetching firewall rule by id", async () => { + const ruleId = "non-existent-rule"; + + mockAdminApi.onGet(generateGetRuleUrl(ruleId)).reply(404, { message: "Firewall rule not found" }); + + await expect(firewallRulesStore.fetchFirewallRuleById(ruleId)).rejects.toBeAxiosErrorWithStatus(404); + }); + + it("should throw on network error when fetching firewall rule by id", async () => { + const ruleId = "5f1996c84d2190a22d5857bb"; + + mockAdminApi.onGet(generateGetRuleUrl(ruleId)).networkError(); + + await expect(firewallRulesStore.fetchFirewallRuleById(ruleId)).rejects.toThrow("Network Error"); + }); + }); +}); diff --git a/ui/admin/tests/unit/store/modules/instance.spec.ts b/ui/admin/tests/unit/store/modules/instance.spec.ts new file mode 100644 index 00000000000..8173a4c0308 --- /dev/null +++ b/ui/admin/tests/unit/store/modules/instance.spec.ts @@ -0,0 +1,207 @@ +import { createPinia, setActivePinia } from "pinia"; +import MockAdapter from "axios-mock-adapter"; +import { describe, expect, it, beforeEach, afterEach } from "vitest"; +import { adminApi } from "@/api/http"; +import useInstanceStore from "@admin/store/modules/instance"; +import { IAdminAuth, IAdminUpdateSAML } from "@admin/interfaces/IInstance"; + +const mockAuthenticationSettings: IAdminAuth = { + local: { + enabled: true, + }, + 
 saml: { + enabled: true, + auth_url: "https://example.com/auth", + assertion_url: "https://example.com/assertion", + idp: { + entity_id: "entity123", + binding: { + post: "https://example.com/signon-post", + redirect: "https://example.com/signon-redirect", + }, + certificates: ["cert123"], + }, + sp: { + sign_auth_requests: true, + certificate: "cert-sp-123", + }, + }, +}; + +const mockSAMLUpdate: IAdminUpdateSAML = { + enable: false, + idp: { + entity_id: "new-entity", + binding: { + post: "https://new-url.com/post", + redirect: "https://new-url.com/redirect", + }, + }, + sp: { + sign_requests: false, + }, +}; + +describe("Admin Instance Store", () => { + let instanceStore: ReturnType<typeof useInstanceStore>; + let mockAdminApi: MockAdapter; + + beforeEach(() => { + setActivePinia(createPinia()); + instanceStore = useInstanceStore(); + mockAdminApi = new MockAdapter(adminApi.getAxios()); + }); + + afterEach(() => { mockAdminApi.reset(); }); + + describe("Initial State", () => { + it("should have default authentication settings", () => { + expect(instanceStore.authenticationSettings).toEqual({ + local: { enabled: false }, + saml: { + enabled: false, + auth_url: "", + assertion_url: "", + idp: { + entity_id: "", + binding: { + post: "", + redirect: "", + }, + certificates: [], + }, + sp: { + sign_auth_requests: false, + certificate: "", + }, + }, + }); + }); + + it("should have isLocalAuthEnabled as false", () => { + expect(instanceStore.isLocalAuthEnabled).toBe(false); + }); + + it("should have isSamlEnabled as false", () => { + expect(instanceStore.isSamlEnabled).toBe(false); + }); + }); + + describe("Computed Properties", () => { + it("should compute isLocalAuthEnabled correctly when enabled", () => { + instanceStore.authenticationSettings = { + ...instanceStore.authenticationSettings, + local: { enabled: true }, + }; + + expect(instanceStore.isLocalAuthEnabled).toBe(true); + }); + + it("should compute isSamlEnabled correctly when enabled", () => { + instanceStore.authenticationSettings 
= { + ...instanceStore.authenticationSettings, + saml: { + ...instanceStore.authenticationSettings.saml, + enabled: true, + }, + }; + + expect(instanceStore.isSamlEnabled).toBe(true); + }); + }); + + describe("fetchAuthenticationSettings", () => { + const baseUrl = "http://localhost:3000/admin/api/authentication"; + + it("should fetch authentication settings successfully and update state", async () => { + mockAdminApi.onGet(baseUrl).reply(200, mockAuthenticationSettings); + + await instanceStore.fetchAuthenticationSettings(); + + expect(instanceStore.authenticationSettings).toEqual(mockAuthenticationSettings); + expect(instanceStore.isLocalAuthEnabled).toBe(true); + expect(instanceStore.isSamlEnabled).toBe(true); + }); + + it("should throw on server error when fetching settings", async () => { + mockAdminApi.onGet(baseUrl).reply(500); + + await expect(instanceStore.fetchAuthenticationSettings()).rejects.toBeAxiosErrorWithStatus(500); + }); + + it("should throw on network error when fetching settings", async () => { + mockAdminApi.onGet(baseUrl).networkError(); + + await expect(instanceStore.fetchAuthenticationSettings()).rejects.toThrow("Network Error"); + }); + }); + + describe("updateLocalAuthentication", () => { + const updateUrl = "http://localhost:3000/admin/api/authentication/local"; + const fetchUrl = "http://localhost:3000/admin/api/authentication"; + + it("should update local authentication and refresh settings", async () => { + const updatedSettings = { + ...mockAuthenticationSettings, + local: { enabled: false }, + }; + + mockAdminApi.onPut(updateUrl, { enable: false }).reply(200); + mockAdminApi.onGet(fetchUrl).reply(200, updatedSettings); + + await instanceStore.updateLocalAuthentication(false); + + expect(instanceStore.authenticationSettings).toEqual(updatedSettings); + expect(instanceStore.isLocalAuthEnabled).toBe(false); + }); + + it("should throw on server error when updating local auth", async () => { + mockAdminApi.onPut(updateUrl, { enable: true 
}).reply(500); + + await expect(instanceStore.updateLocalAuthentication(true)).rejects.toBeAxiosErrorWithStatus(500); + }); + + it("should throw on network error when updating local auth", async () => { + mockAdminApi.onPut(updateUrl, { enable: true }).networkError(); + + await expect(instanceStore.updateLocalAuthentication(true)).rejects.toThrow("Network Error"); + }); + }); + + describe("updateSamlAuthentication", () => { + const updateUrl = "http://localhost:3000/admin/api/authentication/saml"; + const fetchUrl = "http://localhost:3000/admin/api/authentication"; + + it("should update SAML authentication and refresh settings", async () => { + const updatedSettings = { + ...mockAuthenticationSettings, + saml: { + ...mockSAMLUpdate, + }, + }; + + // Payload sends "enable", but response uses "enabled" + const updatedSettingsResponse = { ...updatedSettings, saml: { ...updatedSettings.saml, enabled: false } }; + + mockAdminApi.onPut(updateUrl, mockSAMLUpdate).reply(200); + mockAdminApi.onGet(fetchUrl).reply(200, updatedSettingsResponse); + + await instanceStore.updateSamlAuthentication(mockSAMLUpdate); + + expect(instanceStore.authenticationSettings).toEqual(updatedSettingsResponse); + expect(instanceStore.isSamlEnabled).toBe(false); + }); + + it("should throw on server error when updating SAML auth", async () => { + mockAdminApi.onPut(updateUrl, mockSAMLUpdate).reply(500); + + await expect(instanceStore.updateSamlAuthentication(mockSAMLUpdate)).rejects.toBeAxiosErrorWithStatus(500); + }); + + it("should throw on network error when updating SAML auth", async () => { + mockAdminApi.onPut(updateUrl, mockSAMLUpdate).networkError(); + + await expect(instanceStore.updateSamlAuthentication(mockSAMLUpdate)).rejects.toThrow("Network Error"); + }); + }); +}); diff --git a/ui/admin/tests/unit/store/modules/license.spec.ts b/ui/admin/tests/unit/store/modules/license.spec.ts new file mode 100644 index 00000000000..e4fd0d858c6 --- /dev/null +++ 
b/ui/admin/tests/unit/store/modules/license.spec.ts @@ -0,0 +1,153 @@ +import { createPinia, setActivePinia } from "pinia"; +import MockAdapter from "axios-mock-adapter"; +import { describe, expect, it, beforeEach, afterEach } from "vitest"; +import { adminApi } from "@/api/http"; +import axios from "axios"; +import useLicenseStore from "@admin/store/modules/license"; +import { IAdminLicense } from "@admin/interfaces/ILicense"; + +const mockLicense: IAdminLicense = { + expired: false, + about_to_expire: false, + grace_period: false, + id: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + issued_at: 1704067200, + starts_at: 1704067200, + expires_at: 1735689600, + allowed_regions: [], + customer: { + id: "customer-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + name: "ShellHub", + email: "contact@shellhub.io", + company: "ShellHub Inc", + }, + features: { + devices: 100, + session_recording: true, + firewall_rules: true, + reports: true, + login_link: true, + billing: true, + }, +}; + +const mockExpiredLicense: IAdminLicense = { + ...mockLicense, + expired: true, +}; + +describe("Admin License Store", () => { + let licenseStore: ReturnType<typeof useLicenseStore>; + let mockAdminApi: MockAdapter; + let mockAxios: MockAdapter; + + beforeEach(() => { + setActivePinia(createPinia()); + licenseStore = useLicenseStore(); + mockAdminApi = new MockAdapter(adminApi.getAxios()); + mockAxios = new MockAdapter(axios); + localStorage.clear(); + }); + + afterEach(() => { + mockAdminApi.reset(); + mockAxios.reset(); + localStorage.clear(); + }); + + describe("Initial State", () => { + it("should have empty license object", () => { + expect(licenseStore.license).toEqual({}); + }); + + it("should have isExpired as true when license is empty", () => { + expect(licenseStore.isExpired).toBe(true); + }); + }); + + describe("Computed Properties", () => { + it("should compute isExpired as false when license is not expired", () => { + licenseStore.license = mockLicense; + + expect(licenseStore.isExpired).toBe(false); + }); + + 
it("should compute isExpired as true when license is expired", () => { + licenseStore.license = mockExpiredLicense; + + expect(licenseStore.isExpired).toBe(true); + }); + + it("should compute isExpired as true when expired field is undefined", () => { + const licenseWithoutExpired = { ...mockLicense }; + delete (licenseWithoutExpired as Partial<IAdminLicense>).expired; + licenseStore.license = licenseWithoutExpired as IAdminLicense; + + expect(licenseStore.isExpired).toBe(true); + }); + }); + + describe("getLicense", () => { + const baseUrl = "http://localhost:3000/admin/api/license"; + + it("should fetch license successfully and update state", async () => { + mockAdminApi.onGet(baseUrl).reply(200, mockLicense); + + await licenseStore.getLicense(); + + expect(licenseStore.license).toEqual(mockLicense); + expect(licenseStore.isExpired).toBe(false); + }); + + it("should throw on not found error when fetching license", async () => { + mockAdminApi.onGet(baseUrl).reply(404); + + await expect(licenseStore.getLicense()).rejects.toBeAxiosErrorWithStatus(404); + }); + + it("should throw on network error when fetching license", async () => { + mockAdminApi.onGet(baseUrl).networkError(); + + await expect(licenseStore.getLicense()).rejects.toThrow("Network Error"); + }); + }); + + describe("uploadLicense", () => { + const uploadUrl = `${window.location.origin}/admin/api/license`; + + it("should upload license file successfully with FormData", async () => { + const mockFile = new File(["license content"], "license.txt", { type: "text/plain" }); + const token = "test-bearer-token"; + localStorage.setItem("token", token); + + mockAxios + .onPost(uploadUrl) + .reply((config) => { + expect(config.data).toBeInstanceOf(FormData); + expect(config.headers?.Authorization).toBe(`Bearer ${token}`); + expect(config.headers?.["Content-Type"]).toBe("multipart/form-data"); + return [200, { success: true }]; + }); + + await licenseStore.uploadLicense(mockFile); + }); + + it("should throw on server error 
when uploading license", async () => { + const mockFile = new File(["license content"], "license.txt", { type: "text/plain" }); + localStorage.setItem("token", "test-token"); + + mockAxios.onPost(uploadUrl).reply(500); + + await expect(licenseStore.uploadLicense(mockFile)).rejects.toBeAxiosErrorWithStatus(500); + }); + + it("should throw on network error when uploading license", async () => { + const mockFile = new File(["license content"], "license.txt", { type: "text/plain" }); + localStorage.setItem("token", "test-token"); + + mockAxios.onPost(uploadUrl).networkError(); + + await expect(licenseStore.uploadLicense(mockFile)).rejects.toThrow("Network Error"); + }); + }); +}); diff --git a/ui/admin/tests/unit/store/modules/namespaces.spec.ts b/ui/admin/tests/unit/store/modules/namespaces.spec.ts new file mode 100644 index 00000000000..d349f942ea4 --- /dev/null +++ b/ui/admin/tests/unit/store/modules/namespaces.spec.ts @@ -0,0 +1,259 @@ +import { createPinia, setActivePinia } from "pinia"; +import MockAdapter from "axios-mock-adapter"; +import { describe, expect, it, beforeEach, afterEach } from "vitest"; +import { adminApi } from "@/api/http"; +import useNamespacesStore from "@admin/store/modules/namespaces"; +import { IAdminNamespace } from "@admin/interfaces/INamespace"; +import { buildUrl } from "@tests/utils/url"; + +const mockNamespaceBase: IAdminNamespace = { + name: "admin-namespace", + owner: "admin-user-id", + type: "personal", + devices_accepted_count: 5, + devices_pending_count: 1, + devices_rejected_count: 0, + tenant_id: "tenant-id-123", + members: [ + { + id: "member-id-1", + email: "admin@example.com", + role: "owner", + added_at: "2026-01-06T00:00:00.000Z", + expires_at: "2027-01-06T00:00:00.000Z", + }, + ], + settings: { + session_record: true, + connection_announcement: "", + }, + created_at: "2026-01-06T00:00:00.000Z", + max_devices: 10, +}; + +describe("Admin Namespaces Store", () => { + let namespacesStore: ReturnType; + let mockAdminApi: 
MockAdapter; + + beforeEach(() => { + setActivePinia(createPinia()); + namespacesStore = useNamespacesStore(); + mockAdminApi = new MockAdapter(adminApi.getAxios()); + }); + + afterEach(() => { mockAdminApi.reset(); }); + + describe("Initial State", () => { + it("should have empty namespaces array", () => { + expect(namespacesStore.namespaces).toEqual([]); + }); + + it("should have zero namespace count", () => { + expect(namespacesStore.namespaceCount).toBe(0); + }); + + it("should have empty namespace object", () => { + expect(namespacesStore.namespace).toEqual({}); + }); + + it("should have empty current filter", () => { + expect(namespacesStore.currentFilter).toBe(""); + }); + }); + + describe("setFilter", () => { + it("should set filter value", () => { + namespacesStore.setFilter("owner:admin"); + expect(namespacesStore.currentFilter).toBe("owner:admin"); + }); + + it("should set empty string when filter is empty", () => { + namespacesStore.setFilter(""); + expect(namespacesStore.currentFilter).toBe(""); + }); + }); + + describe("fetchNamespaceList", () => { + const baseUrl = "http://localhost:3000/admin/api/namespaces"; + + it("should fetch namespaces list successfully with default pagination", async () => { + const namespacesList = [mockNamespaceBase]; + + mockAdminApi.onGet(buildUrl(baseUrl, { filter: "", page: "1", per_page: "10" })).reply(200, namespacesList, { "x-total-count": "1" }); + + await expect(namespacesStore.fetchNamespaceList()).resolves.not.toThrow(); + + expect(namespacesStore.namespaces).toEqual(namespacesList); + expect(namespacesStore.namespaceCount).toBe(1); + }); + + it("should fetch namespaces list successfully with custom pagination", async () => { + const namespacesList = [mockNamespaceBase]; + + mockAdminApi.onGet(buildUrl(baseUrl, { filter: "", page: "2", per_page: "20" })).reply(200, namespacesList, { "x-total-count": "1" }); + + await expect(namespacesStore.fetchNamespaceList({ page: 2, perPage: 20 })).resolves.not.toThrow(); + + 
expect(namespacesStore.namespaces).toEqual(namespacesList); + expect(namespacesStore.namespaceCount).toBe(1); + }); + + it("should fetch namespaces list with filter successfully", async () => { + const namespacesList = [mockNamespaceBase]; + const filter = "test"; + + mockAdminApi.onGet(buildUrl(baseUrl, { filter, page: "1", per_page: "10" })).reply(200, namespacesList, { "x-total-count": "1" }); + + await expect(namespacesStore.fetchNamespaceList({ filter })).resolves.not.toThrow(); + + expect(namespacesStore.namespaces).toEqual(namespacesList); + expect(namespacesStore.namespaceCount).toBe(1); + }); + + it("should use current filter when not provided in parameters", async () => { + namespacesStore.setFilter("old_filter"); + + const namespacesList = [mockNamespaceBase]; + + mockAdminApi + .onGet(buildUrl(baseUrl, { filter: "old_filter", page: "1", per_page: "10" })) + .reply(200, namespacesList, { "x-total-count": "1" }); + + await expect(namespacesStore.fetchNamespaceList()).resolves.not.toThrow(); + + expect(namespacesStore.namespaces).toEqual(namespacesList); + expect(namespacesStore.namespaceCount).toBe(1); + }); + + it("should fetch empty namespaces list successfully", async () => { + mockAdminApi.onGet(buildUrl(baseUrl, { filter: "", page: "1", per_page: "10" })).reply(200, [], { "x-total-count": "0" }); + + await expect(namespacesStore.fetchNamespaceList()).resolves.not.toThrow(); + + expect(namespacesStore.namespaces).toEqual([]); + expect(namespacesStore.namespaceCount).toBe(0); + }); + + it("should throw on server error when fetching namespaces list", async () => { + mockAdminApi.onGet(buildUrl(baseUrl, { filter: "", page: "1", per_page: "10" })).reply(500); + + await expect(namespacesStore.fetchNamespaceList()).rejects.toBeAxiosErrorWithStatus(500); + }); + + it("should throw on network error when fetching namespaces list", async () => { + mockAdminApi.onGet(buildUrl(baseUrl, { filter: "", page: "1", per_page: "10" })).networkError(); + + await 
expect(namespacesStore.fetchNamespaceList()).rejects.toThrow("Network Error"); + }); + }); + + describe("fetchNamespaceById", () => { + const namespaceId = "tenant-id-123"; + const baseGetNamespaceUrl = `http://localhost:3000/admin/api/namespaces/${namespaceId}`; + + it("should fetch namespace by id successfully and return data", async () => { + mockAdminApi.onGet(baseGetNamespaceUrl).reply(200, mockNamespaceBase); + + await expect(namespacesStore.fetchNamespaceById(namespaceId)).resolves.not.toThrow(); + expect(namespacesStore.namespace).toEqual(mockNamespaceBase); + }); + + it("should throw on not found error when fetching namespace by id", async () => { + mockAdminApi.onGet(baseGetNamespaceUrl).reply(404, { message: "Namespace not found" }); + + await expect(namespacesStore.fetchNamespaceById(namespaceId)).rejects.toBeAxiosErrorWithStatus(404); + }); + + it("should throw on network error when fetching namespace by id", async () => { + mockAdminApi.onGet(baseGetNamespaceUrl).networkError(); + + await expect(namespacesStore.fetchNamespaceById(namespaceId)).rejects.toThrow("Network Error"); + }); + }); + + describe("exportNamespacesToCsv", () => { + const baseUrl = "http://localhost:3000/admin/api/export/namespaces"; + const csvData = "name,owner,devices\nadmin-namespace,admin,5"; + + it("should export namespaces to CSV successfully and return data", async () => { + const filter = ""; + + mockAdminApi.onGet(buildUrl(baseUrl, { filter })).reply(200, csvData); + + const result = await namespacesStore.exportNamespacesToCsv(filter); + + expect(result).toBe(csvData); + }); + + it("should export namespaces with filter to CSV successfully", async () => { + const filter = "owner:admin"; + + mockAdminApi.onGet(buildUrl(baseUrl, { filter })).reply(200, csvData); + + const result = await namespacesStore.exportNamespacesToCsv(filter); + + expect(result).toBe(csvData); + }); + + it("should throw on not found error when exporting namespaces", async () => { + const filter = ""; + 
+ mockAdminApi.onGet(buildUrl(baseUrl, { filter })).reply(404, { message: "No namespaces to export" }); + + await expect(namespacesStore.exportNamespacesToCsv(filter)).rejects.toBeAxiosErrorWithStatus(404); + }); + + it("should throw on network error when exporting namespaces", async () => { + const filter = ""; + + mockAdminApi.onGet(buildUrl(baseUrl, { filter })).networkError(); + + await expect(namespacesStore.exportNamespacesToCsv(filter)).rejects.toThrow("Network Error"); + }); + }); + + describe("updateNamespace", () => { + const baseUrl = `http://localhost:3000/admin/api/namespaces-update/${mockNamespaceBase.tenant_id}`; + + it("should update namespace successfully", async () => { + mockAdminApi.onPut(baseUrl, mockNamespaceBase).reply(200); + + await expect(namespacesStore.updateNamespace(mockNamespaceBase)).resolves.not.toThrow(); + }); + + it("should throw on not found error when updating namespace", async () => { + mockAdminApi.onPut(baseUrl, mockNamespaceBase).reply(404, { message: "Namespace not found" }); + + await expect(namespacesStore.updateNamespace(mockNamespaceBase)).rejects.toBeAxiosErrorWithStatus(404); + }); + + it("should throw on network error when updating namespace", async () => { + mockAdminApi.onPut(baseUrl, mockNamespaceBase).networkError(); + + await expect(namespacesStore.updateNamespace(mockNamespaceBase)).rejects.toThrow("Network Error"); + }); + }); + + describe("deleteNamespace", () => { + const tenantId = "tenant-id-123"; + const baseUrl = `http://localhost:3000/admin/api/namespaces/${tenantId}`; + + it("should delete namespace successfully", async () => { + mockAdminApi.onDelete(baseUrl).reply(200); + + await expect(namespacesStore.deleteNamespace(tenantId)).resolves.not.toThrow(); + }); + + it("should throw on not found error when deleting namespace", async () => { + mockAdminApi.onDelete(baseUrl).reply(404, { message: "Namespace not found" }); + + await 
expect(namespacesStore.deleteNamespace(tenantId)).rejects.toBeAxiosErrorWithStatus(404); + }); + + it("should throw on network error when deleting namespace", async () => { + mockAdminApi.onDelete(baseUrl).networkError(); + + await expect(namespacesStore.deleteNamespace(tenantId)).rejects.toThrow("Network Error"); + }); + }); +}); diff --git a/ui/admin/tests/unit/store/modules/sessions.spec.ts b/ui/admin/tests/unit/store/modules/sessions.spec.ts new file mode 100644 index 00000000000..c5ad796f8b6 --- /dev/null +++ b/ui/admin/tests/unit/store/modules/sessions.spec.ts @@ -0,0 +1,158 @@ +import { createPinia, setActivePinia } from "pinia"; +import MockAdapter from "axios-mock-adapter"; +import { describe, expect, it, beforeEach, afterEach } from "vitest"; +import { adminApi } from "@/api/http"; +import useSessionsStore from "@admin/store/modules/sessions"; +import { IAdminSession } from "@admin/interfaces/ISession"; +import { buildUrl } from "@tests/utils/url"; + +const mockSessionBase: IAdminSession = { + uid: "session-uid-123", + device_uid: "device-uid-456", + device: { + uid: "device-uid-456", + name: "admin-device", + identity: { + mac: "00:1A:2B:3C:4D:5E", + }, + info: { + id: "debian", + pretty_name: "Debian GNU/Linux 11", + version: "11", + arch: "x86_64", + platform: "docker", + }, + public_key: "ssh-rsa AAAAB3NzaC1...", + tenant_id: "tenant-id-789", + last_seen: "2026-01-01T12:00:00.000Z", + status_updated_at: "2026-01-01T12:00:00.000Z", + online: true, + namespace: "admin-namespace", + status: "accepted", + created_at: "2026-01-01T00:00:00.000Z", + remote_addr: "192.168.1.100", + position: { latitude: 0, longitude: 0 }, + tags: [{ + name: "admin", + tenant_id: "tenant-id-789", + created_at: "2026-01-01T00:00:00.000Z", + updated_at: "2026-01-01T00:00:00.000Z", + }], + }, + tenant_id: "tenant-id-789", + username: "admin", + ip_address: "192.168.1.50", + started_at: "2026-01-01T10:00:00.000Z", + last_seen: "2026-01-01T12:00:00.000Z", + active: true, + 
authenticated: true, + recorded: true, + type: "shell", + term: "xterm-256color", + position: { latitude: 0, longitude: 0 }, +}; + +describe("Admin Sessions Store", () => { + let sessionsStore: ReturnType; + let mockAdminApi: MockAdapter; + + beforeEach(() => { + setActivePinia(createPinia()); + sessionsStore = useSessionsStore(); + mockAdminApi = new MockAdapter(adminApi.getAxios()); + }); + + afterEach(() => { mockAdminApi.reset(); }); + + describe("Initial State", () => { + it("should have empty sessions array", () => { + expect(sessionsStore.sessions).toEqual([]); + }); + + it("should have zero session count", () => { + expect(sessionsStore.sessionCount).toBe(0); + }); + }); + + describe("fetchSessionList", () => { + const baseUrl = "http://localhost:3000/admin/api/sessions"; + + it("should fetch sessions list successfully with pagination", async () => { + const sessionList = [mockSessionBase]; + + mockAdminApi.onGet(buildUrl(baseUrl, { page: "1", per_page: "10" })).reply(200, sessionList, { "x-total-count": "1" }); + + await expect(sessionsStore.fetchSessionList({ perPage: 10, page: 1 })).resolves.not.toThrow(); + + expect(sessionsStore.sessions).toEqual(sessionList); + expect(sessionsStore.sessionCount).toBe(1); + }); + + it("should fetch sessions list with multiple sessions", async () => { + const sessionList = [ + mockSessionBase, + { ...mockSessionBase, uid: "session-uid-456" }, + ]; + + mockAdminApi.onGet(buildUrl(baseUrl, { page: "2", per_page: "20" })).reply(200, sessionList, { "x-total-count": "2" }); + + await expect(sessionsStore.fetchSessionList({ perPage: 20, page: 2 })).resolves.not.toThrow(); + + expect(sessionsStore.sessions).toEqual(sessionList); + expect(sessionsStore.sessionCount).toBe(2); + }); + + it("should fetch empty sessions list successfully", async () => { + mockAdminApi + .onGet(buildUrl(baseUrl, { page: "1", per_page: "10" })) + .reply(200, [], { "x-total-count": "0" }); + + await expect(sessionsStore.fetchSessionList({ perPage: 10, 
page: 1 })).resolves.not.toThrow(); + + expect(sessionsStore.sessions).toEqual([]); + expect(sessionsStore.sessionCount).toBe(0); + }); + + it("should throw on server error when fetching sessions list", async () => { + mockAdminApi.onGet(buildUrl(baseUrl, { page: "1", per_page: "10" })).reply(500); + + await expect(sessionsStore.fetchSessionList({ perPage: 10, page: 1 })).rejects.toBeAxiosErrorWithStatus(500); + }); + + it("should throw on network error when fetching sessions list", async () => { + mockAdminApi.onGet(buildUrl(baseUrl, { page: "1", per_page: "10" })).networkError(); + + await expect(sessionsStore.fetchSessionList({ perPage: 10, page: 1 })).rejects.toThrow("Network Error"); + }); + }); + + describe("fetchSessionById", () => { + const generateGetSessionUrl = (sessionUid: string) => `http://localhost:3000/admin/api/sessions/${sessionUid}`; + + it("should fetch session by id successfully and return data", async () => { + const sessionUid = "session-uid-123"; + + mockAdminApi.onGet(generateGetSessionUrl(sessionUid)).reply(200, mockSessionBase); + + await sessionsStore.fetchSessionById(sessionUid); + + expect(sessionsStore.session).toEqual(mockSessionBase); + }); + + it("should throw on not found error when fetching session by id", async () => { + const sessionUid = "non-existent-session"; + + mockAdminApi.onGet(generateGetSessionUrl(sessionUid)).reply(404, { message: "Session not found" }); + + await expect(sessionsStore.fetchSessionById(sessionUid)).rejects.toBeAxiosErrorWithStatus(404); + }); + + it("should throw on network error when fetching session by id", async () => { + const sessionUid = "session-uid-123"; + + mockAdminApi.onGet(generateGetSessionUrl(sessionUid)).networkError(); + + await expect(sessionsStore.fetchSessionById(sessionUid)).rejects.toThrow("Network Error"); + }); + }); +}); diff --git a/ui/admin/tests/unit/store/modules/stats.spec.ts b/ui/admin/tests/unit/store/modules/stats.spec.ts new file mode 100644 index 
00000000000..aed98f4ec5a --- /dev/null +++ b/ui/admin/tests/unit/store/modules/stats.spec.ts @@ -0,0 +1,58 @@ +import { createPinia, setActivePinia } from "pinia"; +import MockAdapter from "axios-mock-adapter"; +import { describe, expect, it, beforeEach, afterEach } from "vitest"; +import { adminApi } from "@/api/http"; +import useStatsStore from "@admin/store/modules/stats"; +import { IAdminStats } from "@admin/interfaces/IStats"; + +const mockStats: IAdminStats = { + registered_devices: 10, + online_devices: 5, + active_sessions: 3, + pending_devices: 2, + rejected_devices: 1, + registered_users: 15, +}; + +describe("Admin Stats Store", () => { + let statsStore: ReturnType; + let mockAdminApi: MockAdapter; + + beforeEach(() => { + setActivePinia(createPinia()); + statsStore = useStatsStore(); + mockAdminApi = new MockAdapter(adminApi.getAxios()); + }); + + afterEach(() => { mockAdminApi.reset(); }); + + describe("initial state", () => { + it("should have empty stats initially", () => { + expect(statsStore.stats).toEqual({} as IAdminStats); + }); + }); + + describe("getStats", () => { + const baseUrl = "http://localhost:3000/admin/api/stats"; + + it("should fetch stats successfully and return data", async () => { + mockAdminApi.onGet(baseUrl).reply(200, mockStats); + + await statsStore.getStats(); + + expect(statsStore.stats).toEqual(mockStats); + }); + + it("should throw on not found error when fetching stats", async () => { + mockAdminApi.onGet(baseUrl).reply(404, { message: "Stats not found" }); + + await expect(statsStore.getStats()).rejects.toBeAxiosErrorWithStatus(404); + }); + + it("should throw on network error when fetching stats", async () => { + mockAdminApi.onGet(baseUrl).networkError(); + + await expect(statsStore.getStats()).rejects.toThrow("Network Error"); + }); + }); +}); diff --git a/ui/admin/tests/unit/store/modules/users.spec.ts b/ui/admin/tests/unit/store/modules/users.spec.ts new file mode 100644 index 00000000000..8919251ac5b --- /dev/null 
+++ b/ui/admin/tests/unit/store/modules/users.spec.ts @@ -0,0 +1,313 @@ +import { createPinia, setActivePinia } from "pinia"; +import MockAdapter from "axios-mock-adapter"; +import { describe, expect, it, beforeEach, afterEach } from "vitest"; +import { adminApi } from "@/api/http"; +import useUsersStore from "@admin/store/modules/users"; +import { IAdminUser, IAdminUserFormData } from "@admin/interfaces/IUser"; +import { buildUrl } from "@tests/utils/url"; + +const mockUserBase: IAdminUser = { + id: "user-id-123", + status: "confirmed", + max_namespaces: 5, + created_at: "2026-01-01T00:00:00.000Z", + last_login: "2026-01-01T10:00:00.000Z", + name: "Admin User", + username: "admin", + email: "admin@example.com", + recovery_email: "recovery@example.com", + mfa: { enabled: false }, + namespacesOwned: 2, + preferences: { + auth_methods: ["local"], + }, + email_marketing: false, + admin: true, +}; + +const mockUserFormData: IAdminUserFormData = { + name: "New User", + email: "newuser@example.com", + username: "newuser", + password: "password123", + max_namespaces: 3, + status: "confirmed", + admin: false, +}; + +describe("Admin Users Store", () => { + let usersStore: ReturnType; + let mockAdminApi: MockAdapter; + + beforeEach(() => { + setActivePinia(createPinia()); + usersStore = useUsersStore(); + mockAdminApi = new MockAdapter(adminApi.getAxios()); + }); + + afterEach(() => { mockAdminApi.reset(); }); + + describe("Initial State", () => { + it("should have empty users array", () => { + expect(usersStore.users).toEqual([]); + }); + + it("should have zero users count", () => { + expect(usersStore.usersCount).toBe(0); + }); + + it("should have empty user object", () => { + expect(usersStore.user).toEqual({}); + }); + + it("should have empty current filter", () => { + expect(usersStore.currentFilter).toBe(""); + }); + }); + + describe("setFilter", () => { + it("should set filter value", () => { + usersStore.setFilter("status:confirmed"); + 
expect(usersStore.currentFilter).toBe("status:confirmed"); + }); + + it("should set empty string when filter is empty", () => { + usersStore.setFilter(""); + expect(usersStore.currentFilter).toBe(""); + }); + }); + + describe("fetchUsersList", () => { + const baseUrl = "http://localhost:3000/admin/api/users"; + + it("should fetch users list successfully with default pagination", async () => { + const usersList = [mockUserBase]; + + mockAdminApi.onGet(buildUrl(baseUrl, { filter: "", page: "1", per_page: "10" })).reply(200, usersList, { "x-total-count": "1" }); + + await expect(usersStore.fetchUsersList()).resolves.not.toThrow(); + + expect(usersStore.users).toEqual(usersList); + expect(usersStore.usersCount).toBe(1); + }); + + it("should fetch users list successfully with custom pagination", async () => { + const usersList = [mockUserBase, { ...mockUserBase, id: "user-id-456" }]; + + mockAdminApi.onGet(buildUrl(baseUrl, { filter: "", page: "2", per_page: "20" })).reply(200, usersList, { "x-total-count": "2" }); + + await expect(usersStore.fetchUsersList({ page: 2, perPage: 20 })).resolves.not.toThrow(); + + expect(usersStore.users).toEqual(usersList); + expect(usersStore.usersCount).toBe(2); + }); + + it("should fetch users list with filter successfully", async () => { + const usersList = [mockUserBase]; + const filter = "test"; + + mockAdminApi.onGet(buildUrl(baseUrl, { filter, page: "1", per_page: "10" })).reply(200, usersList, { "x-total-count": "1" }); + + await expect(usersStore.fetchUsersList({ filter })).resolves.not.toThrow(); + + expect(usersStore.users).toEqual(usersList); + expect(usersStore.usersCount).toBe(1); + }); + + it("should use current filter when not provided in parameters", async () => { + usersStore.setFilter("old_filter"); + + const usersList = [mockUserBase]; + + mockAdminApi + .onGet(buildUrl(baseUrl, { filter: "old_filter", page: "1", per_page: "10" })) + .reply(200, usersList, { "x-total-count": "1" }); + + await 
expect(usersStore.fetchUsersList()).resolves.not.toThrow(); + + expect(usersStore.users).toEqual(usersList); + expect(usersStore.usersCount).toBe(1); + }); + + it("should fetch empty users list successfully", async () => { + mockAdminApi.onGet(buildUrl(baseUrl, { filter: "", page: "1", per_page: "10" })).reply(200, [], { "x-total-count": "0" }); + + await expect(usersStore.fetchUsersList()).resolves.not.toThrow(); + + expect(usersStore.users).toEqual([]); + expect(usersStore.usersCount).toBe(0); + }); + + it("should throw on server error when fetching users list", async () => { + mockAdminApi.onGet(buildUrl(baseUrl, { filter: "", page: "1", per_page: "10" })).reply(500); + + await expect(usersStore.fetchUsersList()).rejects.toBeAxiosErrorWithStatus(500); + }); + + it("should throw on network error when fetching users list", async () => { + mockAdminApi.onGet(buildUrl(baseUrl, { filter: "", page: "1", per_page: "10" })).networkError(); + + await expect(usersStore.fetchUsersList()).rejects.toThrow("Network Error"); + }); + }); + + describe("exportUsersToCsv", () => { + const baseUrl = "http://localhost:3000/admin/api/export/users"; + const csvData = "id,name,email,username\nuser-id-123,Admin User,admin@example.com,admin"; + + it("should export users to CSV successfully and return data", async () => { + const filter = ""; + + mockAdminApi.onGet(buildUrl(baseUrl, { filter })).reply(200, csvData); + + const result = await usersStore.exportUsersToCsv(filter); + + expect(result).toBe(csvData); + }); + + it("should export users with filter to CSV successfully", async () => { + const filter = "admin:true"; + + mockAdminApi.onGet(buildUrl(baseUrl, { filter })).reply(200, csvData); + + const result = await usersStore.exportUsersToCsv(filter); + + expect(result).toBe(csvData); + }); + + it("should throw on server error when exporting users", async () => { + const filter = ""; + + mockAdminApi.onGet(buildUrl(baseUrl, { filter })).reply(500); + + await 
expect(usersStore.exportUsersToCsv(filter)).rejects.toBeAxiosErrorWithStatus(500); + }); + + it("should throw on network error when exporting users", async () => { + const filter = ""; + + mockAdminApi.onGet(buildUrl(baseUrl, { filter })).networkError(); + + await expect(usersStore.exportUsersToCsv(filter)).rejects.toThrow("Network Error"); + }); + }); + + describe("addUser", () => { + const baseUrl = "http://localhost:3000/admin/api/users"; + const { status: _status, ...userData } = mockUserFormData; + it("should add user successfully", async () => { + mockAdminApi.onPost(baseUrl, userData).reply(201); + + await expect(usersStore.addUser(mockUserFormData)).resolves.not.toThrow(); + }); + + it("should throw on forbidden error when adding user", async () => { + mockAdminApi.onPost(baseUrl, userData).reply(403, { message: "Forbidden" }); + + await expect(usersStore.addUser(mockUserFormData)).rejects.toBeAxiosErrorWithStatus(403); + }); + + it("should throw on network error when adding user", async () => { + mockAdminApi.onPost(baseUrl, userData).networkError(); + + await expect(usersStore.addUser(mockUserFormData)).rejects.toThrow("Network Error"); + }); + }); + + describe("fetchUserById", () => { + const userId = "user-id-123"; + const baseGetUserUrl = `http://localhost:3000/admin/api/users/${userId}`; + + it("should fetch user by id successfully and return data", async () => { + mockAdminApi.onGet(baseGetUserUrl).reply(200, mockUserBase); + + await expect(usersStore.fetchUserById(userId)).resolves.not.toThrow(); + expect(usersStore.user).toEqual(mockUserBase); + }); + + it("should throw on not found error when fetching user by id", async () => { + mockAdminApi.onGet(baseGetUserUrl).reply(404, { message: "User not found" }); + + await expect(usersStore.fetchUserById(userId)).rejects.toBeAxiosErrorWithStatus(404); + }); + + it("should throw on network error when fetching user by id", async () => { + mockAdminApi.onGet(baseGetUserUrl).networkError(); + + await 
expect(usersStore.fetchUserById(userId)).rejects.toThrow("Network Error"); + }); + }); + + describe("updateUser", () => { + const userId = "user-id-123"; + const updateData = { ...mockUserFormData, id: userId }; + const baseUrl = `http://localhost:3000/admin/api/users/${userId}`; + + it("should update user successfully", async () => { + mockAdminApi.onPut(baseUrl, mockUserFormData).reply(200); + + await expect(usersStore.updateUser(updateData)).resolves.not.toThrow(); + }); + + it("should throw on not found error when updating user", async () => { + mockAdminApi.onPut(baseUrl, mockUserFormData).reply(404, { message: "User not found" }); + + await expect(usersStore.updateUser(updateData)).rejects.toBeAxiosErrorWithStatus(404); + }); + + it("should throw on network error when updating user", async () => { + mockAdminApi.onPut(baseUrl, mockUserFormData).networkError(); + + await expect(usersStore.updateUser(updateData)).rejects.toThrow("Network Error"); + }); + }); + + describe("deleteUser", () => { + const userId = "user-id-123"; + const baseUrl = `http://localhost:3000/admin/api/users/${userId}`; + + it("should delete user successfully", async () => { + mockAdminApi.onDelete(baseUrl).reply(200); + + await expect(usersStore.deleteUser(userId)).resolves.not.toThrow(); + }); + + it("should throw on not found error when deleting user", async () => { + mockAdminApi.onDelete(baseUrl).reply(404, { message: "User not found" }); + + await expect(usersStore.deleteUser(userId)).rejects.toBeAxiosErrorWithStatus(404); + }); + + it("should throw on network error when deleting user", async () => { + mockAdminApi.onDelete(baseUrl).networkError(); + + await expect(usersStore.deleteUser(userId)).rejects.toThrow("Network Error"); + }); + }); + + describe("resetUserPassword", () => { + const userId = "user-id-123"; + const baseUrl = `http://localhost:3000/admin/api/users/${userId}/password/reset`; + const newPassword = "new-temp-password-123"; + + it("should reset user password 
successfully and return new password", async () => { + mockAdminApi.onPatch(baseUrl).reply(200, newPassword); + + const result = await usersStore.resetUserPassword(userId); + + expect(result).toBe(newPassword); + }); + + it("should throw on server error when resetting user password", async () => { + mockAdminApi.onPatch(baseUrl).reply(500); + + await expect(usersStore.resetUserPassword(userId)).rejects.toBeAxiosErrorWithStatus(500); + }); + + it("should throw on network error when resetting user password", async () => { + mockAdminApi.onPatch(baseUrl).networkError(); + + await expect(usersStore.resetUserPassword(userId)).rejects.toThrow("Network Error"); + }); + }); +}); diff --git a/ui/admin/tests/unit/views/AnnouncementDetails.spec.ts b/ui/admin/tests/unit/views/AnnouncementDetails.spec.ts new file mode 100644 index 00000000000..9c271b1010f --- /dev/null +++ b/ui/admin/tests/unit/views/AnnouncementDetails.spec.ts @@ -0,0 +1,77 @@ +import { describe, expect, it, beforeEach, vi, afterEach } from "vitest"; +import { VueWrapper, flushPromises } from "@vue/test-utils"; +import { mountComponent, mockSnackbar } from "@tests/utils/mount"; +import { createCleanAdminRouter } from "@tests/utils/router"; +import { createAxiosError } from "@tests/utils/axiosError"; +import useAnnouncementStore from "@admin/store/modules/announcement"; +import AnnouncementDetails from "@admin/views/AnnouncementDetails.vue"; +import { mockAnnouncement } from "../mocks"; +import { formatFullDateTime } from "@/utils/date"; + +vi.mock("@admin/store/api/announcement"); + +describe("AnnouncementDetails", () => { + let wrapper: VueWrapper>; + + const mountWrapper = async (mockError?: Error) => { + const router = createCleanAdminRouter(); + await router.push({ name: "announcementDetails", params: { uuid: mockAnnouncement.uuid } }); + await router.isReady(); + + wrapper = mountComponent(AnnouncementDetails, { + global: { plugins: [router] }, + piniaOptions: { + initialState: { adminAnnouncement: 
mockError ? {} : { announcement: mockAnnouncement } }, + stubActions: !mockError, + }, + }); + + const announcementStore = useAnnouncementStore(); + if (mockError) vi.mocked(announcementStore.fetchAnnouncement).mockRejectedValueOnce(mockError); + + await flushPromises(); + }; + + afterEach(() => { + vi.clearAllMocks(); + wrapper?.unmount(); + }); + + describe("when announcement loads successfully", () => { + beforeEach(() => mountWrapper()); + + it("displays announcement title in card header", () => { + expect(wrapper.find(".text-h6").text()).toBe(mockAnnouncement.title); + }); + + it("displays uuid field with value", () => { + const uuidField = wrapper.find('[data-test="announcement-uuid-field"]'); + expect(uuidField.text()).toContain("UUID:"); + expect(uuidField.text()).toContain(mockAnnouncement.uuid); + }); + + it("displays date field", () => { + const dateField = wrapper.find('[data-test="announcement-date-field"]'); + expect(dateField.text()).toContain("Date:"); + expect(dateField.text()).toContain(formatFullDateTime(mockAnnouncement.date)); + }); + + it("displays content field", () => { + const contentField = wrapper.find('[data-test="announcement-content-field"]'); + expect(contentField.text()).toContain("Content:"); + expect(contentField.html()).toContain("

ShellHub new features

"); + }); + + it("shows actions menu button", () => { + const menuBtn = wrapper.find('[data-test="announcement-actions-menu-btn"]'); + expect(menuBtn.exists()).toBe(true); + }); + }); + + describe("when announcement fails to load", () => { + it("shows error snackbar", async () => { + await mountWrapper(createAxiosError(404, "Not Found")); + expect(mockSnackbar.showError).toHaveBeenCalledWith("Failed to get announcement details."); + }); + }); +}); diff --git a/ui/admin/tests/unit/views/Announcements.spec.ts b/ui/admin/tests/unit/views/Announcements.spec.ts new file mode 100644 index 00000000000..007c8282e69 --- /dev/null +++ b/ui/admin/tests/unit/views/Announcements.spec.ts @@ -0,0 +1,56 @@ +import { describe, expect, it, beforeEach, vi, afterEach } from "vitest"; +import { VueWrapper } from "@vue/test-utils"; +import { mountComponent } from "@tests/utils/mount"; +import { createCleanAdminRouter } from "@tests/utils/router"; +import Announcements from "@admin/views/Announcements.vue"; +import { Router } from "vue-router"; + +describe("Announcements", () => { + let wrapper: VueWrapper>; + let router: Router; + + beforeEach(async () => { + router = createCleanAdminRouter(); + await router.push({ name: "announcements" }); + await router.isReady(); + + wrapper = mountComponent(Announcements, { global: { plugins: [router] } }); + }); + + afterEach(() => { wrapper?.unmount(); }); + + it("displays the page header with correct title", () => { + const header = wrapper.find('[data-test="announcement-title"]'); + expect(header.exists()).toBe(true); + expect(wrapper.text()).toContain("Announcements"); + }); + + it("displays the page header with correct overline", () => { + expect(wrapper.text()).toContain("Platform Messaging"); + }); + + it("displays the page header description", () => { + expect(wrapper.text()).toContain("Share important system broadcasts with every namespace administrator."); + }); + + it("displays the new announcement button", () => { + const newBtn = 
wrapper.find('[data-test="new-announcement-btn"]'); + expect(newBtn.exists()).toBe(true); + expect(newBtn.text()).toBe("New"); + }); + + it("displays the announcement list component", () => { + const list = wrapper.find('[data-test="announcement-list"]'); + const emptyState = wrapper.find('[data-test="announcements-empty-state"]'); + expect(list.exists() || emptyState.exists()).toBe(true); + }); + + it("navigates to new announcement page when button is clicked", async () => { + const pushSpy = vi.spyOn(router, "push"); + const newBtn = wrapper.find('[data-test="new-announcement-btn"]'); + + await newBtn.trigger("click"); + + expect(pushSpy).toHaveBeenCalledWith({ name: "new-announcement" }); + }); +}); diff --git a/ui/admin/tests/unit/views/Dashboard.spec.ts b/ui/admin/tests/unit/views/Dashboard.spec.ts new file mode 100644 index 00000000000..00eabf08f59 --- /dev/null +++ b/ui/admin/tests/unit/views/Dashboard.spec.ts @@ -0,0 +1,90 @@ +import { describe, expect, it, vi, beforeEach, afterEach } from "vitest"; +import { VueWrapper, flushPromises } from "@vue/test-utils"; +import { mountComponent, mockSnackbar } from "@tests/utils/mount"; +import { createAxiosError } from "@tests/utils/axiosError"; +import useStatsStore from "@admin/store/modules/stats"; +import Dashboard from "@admin/views/Dashboard.vue"; +import { mockStats } from "../mocks"; +import getStats from "@admin/store/api/stats"; + +vi.mock("@admin/store/api/stats"); + +describe("Dashboard", () => { + let wrapper: VueWrapper>; + + const mountWrapper = async (mockError?: Error) => { + const statsStore = useStatsStore(); + if (mockError) { + vi.mocked(statsStore.getStats).mockRejectedValueOnce(mockError); + vi.mocked(getStats).mockRejectedValueOnce(mockError); + } + + wrapper = mountComponent(Dashboard, { + piniaOptions: { + initialState: { adminStats: mockError ? 
{} : { stats: mockStats } }, + stubActions: !mockError, + }, + }); + + await flushPromises(); + }; + + afterEach(() => { + vi.clearAllMocks(); + wrapper?.unmount(); + }); + + describe("when stats load successfully", () => { + beforeEach(() => mountWrapper()); + + it("displays the page header with correct title", () => { + expect(wrapper.text()).toContain("System Overview"); + expect(wrapper.text()).toContain("Admin Dashboard"); + }); + + it("displays the stats section heading", () => { + expect(wrapper.text()).toContain("Stats"); + }); + + it("displays all six stat cards with correct values", () => { + const statsCards = [ + { title: "Registered Users", value: mockStats.registered_users }, + { title: "Registered Devices", value: mockStats.registered_devices }, + { title: "Online Devices", value: mockStats.online_devices }, + { title: "Pending Devices", value: mockStats.pending_devices }, + { title: "Rejected Devices", value: mockStats.rejected_devices }, + { title: "Active Sessions", value: mockStats.active_sessions }, + ]; + + statsCards.forEach(({ title, value }) => { + expect(wrapper.text()).toContain(title); + expect(wrapper.text()).toContain(String(value)); + }); + }); + + it("does not show the error message", () => { + expect(wrapper.find('[data-test="dashboard-failed"]').exists()).toBe(false); + }); + }); + + describe("when stats fail to load", () => { + it("displays error message when loading fails", async () => { + await mountWrapper(createAxiosError(500, "Internal Server Error")); + + expect(wrapper.find('[data-test="dashboard-failed"]').exists()).toBe(true); + expect(wrapper.find('[data-test="dashboard-failed"]').text()).toContain("Something is wrong, try again!"); + }); + + it("shows error snackbar for general errors", async () => { + await mountWrapper(createAxiosError(500, "Internal Server Error")); + + expect(mockSnackbar.showError).toHaveBeenCalledWith("Failed to load the dashboard stats. 
Please try again."); + }); + + it("shows specific error snackbar for license errors", async () => { + await mountWrapper(createAxiosError(402, "Payment Required")); + + expect(mockSnackbar.showError).toHaveBeenCalledWith("Failed to load the dashboard stats. Check your license and try again."); + }); + }); +}); diff --git a/ui/admin/tests/unit/views/Device.spec.ts b/ui/admin/tests/unit/views/Device.spec.ts new file mode 100644 index 00000000000..60775f4b200 --- /dev/null +++ b/ui/admin/tests/unit/views/Device.spec.ts @@ -0,0 +1,114 @@ +import { describe, expect, it, beforeEach, vi, afterEach } from "vitest"; +import { VueWrapper, flushPromises } from "@vue/test-utils"; +import { mountComponent, mockSnackbar } from "@tests/utils/mount"; +import { createCleanAdminRouter } from "@tests/utils/router"; +import { createAxiosError } from "@tests/utils/axiosError"; +import useDevicesStore from "@admin/store/modules/devices"; +import { mockDevices } from "../mocks"; +import Device from "@admin/views/Device.vue"; + +vi.mock("@admin/store/api/devices"); + +describe("Device", () => { + let wrapper: VueWrapper>; + let router: ReturnType; + let devicesStore: ReturnType; + + const mountWrapper = async (mockError?: Error) => { + router = createCleanAdminRouter(); + await router.push({ name: "devices" }); + await router.isReady(); + + wrapper = mountComponent(Device, { + global: { plugins: [router] }, + piniaOptions: { + initialState: { + adminDevices: { + devices: mockError ? [] : mockDevices, + deviceCount: mockError ? 
0 : mockDevices.length, + }, + }, + stubActions: !mockError, + }, + }); + + devicesStore = useDevicesStore(); + if (mockError) vi.mocked(devicesStore.fetchDeviceList).mockRejectedValueOnce(mockError); + + await flushPromises(); + }; + + afterEach(() => { + vi.clearAllMocks(); + wrapper?.unmount(); + }); + + describe("when devices load successfully", () => { + beforeEach(() => mountWrapper()); + + it("displays the page header with correct title", () => { + expect(wrapper.text()).toContain("Devices"); + expect(wrapper.text()).toContain("Fleet Oversight"); + }); + + it("displays the search input field", () => { + const searchInput = wrapper.find('[data-test="search-input"]'); + expect(searchInput.exists()).toBe(true); + expect(searchInput.text()).toContain("Search by hostname"); // Placeholder + }); + + it("displays the devices list component", () => { + expect(wrapper.find('[data-test="devices-list"]').exists()).toBe(true); + }); + }); + + describe("when searching for devices", () => { + beforeEach(() => mountWrapper()); + + it("triggers search on keyup event", async () => { + const searchInput = wrapper.find('[data-test="search-input"] input'); + await searchInput.setValue("test-device"); + await searchInput.trigger("keyup"); + await flushPromises(); + + expect(devicesStore.fetchDeviceList).toHaveBeenCalled(); + }); + + it("encodes filter correctly when searching", async () => { + const searchInput = wrapper.find('[data-test="search-input"] input'); + await searchInput.setValue("device-one"); + await searchInput.trigger("keyup"); + await flushPromises(); + + expect(devicesStore.setFilter).toHaveBeenCalled(); + expect(devicesStore.fetchDeviceList).toHaveBeenCalledWith( + expect.objectContaining({ + filter: expect.any(String), + page: 1, + }), + ); + }); + + it("clears filter when search is empty", async () => { + const searchInput = wrapper.find('[data-test="search-input"] input'); + await searchInput.setValue(""); + await searchInput.trigger("keyup"); + await 
flushPromises(); + + expect(devicesStore.setFilter).toHaveBeenCalledWith(""); + expect(devicesStore.fetchDeviceList).toHaveBeenCalledWith( + expect.objectContaining({ + filter: "", + page: 1, + }), + ); + }); + }); + + describe("when devices fail to load", () => { + it("shows error snackbar", async () => { + await mountWrapper(createAxiosError(500, "Server Error")); + expect(mockSnackbar.showError).toHaveBeenCalledWith("Failed to fetch devices."); + }); + }); +}); diff --git a/ui/admin/tests/unit/views/DeviceDetails.spec.ts b/ui/admin/tests/unit/views/DeviceDetails.spec.ts new file mode 100644 index 00000000000..04bc10ae4c2 --- /dev/null +++ b/ui/admin/tests/unit/views/DeviceDetails.spec.ts @@ -0,0 +1,138 @@ +import { describe, expect, it, beforeEach, vi, afterEach } from "vitest"; +import { VueWrapper, flushPromises } from "@vue/test-utils"; +import { mountComponent, mockSnackbar } from "@tests/utils/mount"; +import { createCleanAdminRouter } from "@tests/utils/router"; +import { createAxiosError } from "@tests/utils/axiosError"; +import { formatFullDateTime } from "@/utils/date"; +import useDevicesStore from "@admin/store/modules/devices"; +import DeviceDetails from "@admin/views/DeviceDetails.vue"; +import { mockDevice } from "../mocks"; + +vi.mock("@admin/store/api/devices"); + +describe("DeviceDetails", () => { + let wrapper: VueWrapper>; + + const mountWrapper = async (mockError?: Error) => { + const router = createCleanAdminRouter(); + await router.push({ name: "deviceDetails", params: { id: mockDevice.uid } }); + await router.isReady(); + + wrapper = mountComponent(DeviceDetails, { + global: { plugins: [router] }, + piniaOptions: { + initialState: { adminDevices: mockError ? 
{} : { device: mockDevice } }, + stubActions: !mockError, + }, + }); + + const devicesStore = useDevicesStore(); + if (mockError) vi.mocked(devicesStore.fetchDeviceById).mockRejectedValueOnce(mockError); + + await flushPromises(); + }; + + afterEach(() => { + vi.clearAllMocks(); + wrapper?.unmount(); + }); + + describe("when device loads successfully", () => { + beforeEach(() => mountWrapper()); + + it("displays the device name in the card title", () => { + expect(wrapper.find(".text-h6").text()).toBe(mockDevice.name); + }); + + it("shows online status icon", () => { + const icon = wrapper.find('[data-test="online-icon"]'); + expect(icon.exists()).toBe(true); + }); + + it("shows status chip with correct value", () => { + const statusChip = wrapper.find('[data-test="device-status-chip"]'); + expect(statusChip.exists()).toBe(true); + expect(statusChip.text()).toBe(mockDevice.status); + }); + + it("displays device uid", () => { + const uidField = wrapper.find('[data-test="device-uid-field"]'); + expect(uidField.text()).toContain("UID:"); + expect(uidField.text()).toContain(mockDevice.uid); + }); + + it("displays mac address", () => { + const macField = wrapper.find('[data-test="device-mac-field"]'); + expect(macField.text()).toContain("MAC:"); + expect(macField.text()).toContain(mockDevice.identity.mac); + }); + + it("displays operating system", () => { + const osField = wrapper.find('[data-test="device-pretty-name-field"]'); + expect(osField.text()).toContain("Operating System:"); + expect(osField.text()).toContain(mockDevice.info.pretty_name); + }); + + it("displays agent version", () => { + const versionField = wrapper.find('[data-test="device-version-field"]'); + expect(versionField.text()).toContain("Agent Version:"); + expect(versionField.text()).toContain(mockDevice.info.version); + }); + + it("displays architecture", () => { + const archField = wrapper.find('[data-test="device-architecture-field"]'); + expect(archField.text()).toContain("Architecture:"); + 
expect(archField.text()).toContain(mockDevice.info.arch); + }); + + it("displays platform", () => { + const platformField = wrapper.find('[data-test="device-platform-field"]'); + expect(platformField.text()).toContain("Platform:"); + expect(platformField.text()).toContain(mockDevice.info.platform); + }); + + it("displays namespace with link", () => { + const namespaceField = wrapper.find('[data-test="device-namespace-field"]'); + expect(namespaceField.text()).toContain("Namespace:"); + const link = namespaceField.find("a"); + expect(link.exists()).toBe(true); + expect(link.text()).toBe(mockDevice.namespace); + }); + + it("displays tenant id", () => { + const tenantField = wrapper.find('[data-test="device-tenant-id-field"]'); + expect(tenantField.text()).toContain("Tenant ID:"); + expect(tenantField.text()).toContain(mockDevice.tenant_id); + }); + + it("displays remote address", () => { + const remoteAddrField = wrapper.find('[data-test="device-remote-addr-field"]'); + expect(remoteAddrField.text()).toContain("Remote Address:"); + expect(remoteAddrField.text()).toContain(mockDevice.remote_addr); + }); + + it("displays created at date", () => { + const createdAtField = wrapper.find('[data-test="device-created-at-field"]'); + expect(createdAtField.text()).toContain("Created At:"); + expect(createdAtField.text()).toContain(formatFullDateTime(mockDevice.created_at)); + }); + + it("displays last seen date", () => { + const lastSeenField = wrapper.find('[data-test="device-last-seen-field"]'); + expect(lastSeenField.text()).toContain("Last Seen:"); + expect(lastSeenField.text()).toContain(formatFullDateTime(mockDevice.last_seen)); + }); + + it("displays public key", () => { + const publicKeyField = wrapper.find('[data-test="device-public-key-field"]'); + expect(publicKeyField.text()).toContain("Public Key:"); + expect(publicKeyField.text()).toContain(mockDevice.public_key); + }); + }); + + describe("when device fails to load", () => { + it("shows error snackbar", () => 
mountWrapper(createAxiosError(404, "Not Found")).then(() => { + expect(mockSnackbar.showError).toHaveBeenCalledWith("Failed to get device details."); + })); + }); +}); diff --git a/ui/admin/tests/unit/views/FirewallRuleDetails.spec.ts b/ui/admin/tests/unit/views/FirewallRuleDetails.spec.ts new file mode 100644 index 00000000000..5677a6d0109 --- /dev/null +++ b/ui/admin/tests/unit/views/FirewallRuleDetails.spec.ts @@ -0,0 +1,104 @@ +import { describe, expect, it, beforeEach, vi, afterEach } from "vitest"; +import { VueWrapper, flushPromises } from "@vue/test-utils"; +import { mountComponent, mockSnackbar } from "@tests/utils/mount"; +import { createCleanAdminRouter } from "@tests/utils/router"; +import { createAxiosError } from "@tests/utils/axiosError"; +import { formatSourceIP, formatUsername } from "@/utils/string"; +import useFirewallRulesStore from "@admin/store/modules/firewall_rules"; +import FirewallRulesDetails from "@admin/views/FirewallRulesDetails.vue"; +import { mockFirewallRule } from "../mocks"; + +vi.mock("@admin/store/api/firewall_rules"); + +describe("FirewallRulesDetails", () => { + let wrapper: VueWrapper>; + + const mountWrapper = async (mockError?: Error) => { + const router = createCleanAdminRouter(); + await router.push({ name: "firewallRulesDetails", params: { id: mockFirewallRule.id } }); + await router.isReady(); + + wrapper = mountComponent(FirewallRulesDetails, { + global: { plugins: [router] }, + piniaOptions: { + initialState: { adminFirewallRules: mockError ? 
{} : { firewallRule: mockFirewallRule } }, + stubActions: !mockError, + }, + }); + + const firewallRulesStore = useFirewallRulesStore(); + if (mockError) vi.mocked(firewallRulesStore.fetchFirewallRuleById).mockRejectedValueOnce(mockError); + + await flushPromises(); + }; + + afterEach(() => { + vi.clearAllMocks(); + wrapper?.unmount(); + }); + + describe("when firewall rule loads successfully", () => { + beforeEach(() => mountWrapper()); + + it("displays the rule priority in the card title", () => { + expect(wrapper.find(".text-h6").text()).toBe(`Rule #${mockFirewallRule.priority}`); + }); + + it("shows active status icon", () => { + const icon = wrapper.find('[data-test="active-icon"]'); + expect(icon.exists()).toBe(true); + }); + + it("shows action chip with correct value", () => { + const actionChip = wrapper.find('[data-test="firewall-action-chip"]'); + expect(actionChip.exists()).toBe(true); + expect(actionChip.text()).toBe(mockFirewallRule.action); + }); + + it("displays firewall rule id", () => { + const idField = wrapper.find('[data-test="firewall-id-field"]'); + expect(idField.text()).toContain("ID:"); + expect(idField.text()).toContain(mockFirewallRule.id); + }); + + it("displays priority", () => { + const priorityField = wrapper.find('[data-test="firewall-priority-field"]'); + expect(priorityField.text()).toContain("Priority:"); + expect(priorityField.text()).toContain(String(mockFirewallRule.priority)); + }); + + it("displays namespace with link", () => { + const tenantField = wrapper.find('[data-test="firewall-tenant-field"]'); + expect(tenantField.text()).toContain("Namespace:"); + const link = tenantField.find("a"); + expect(link.exists()).toBe(true); + expect(link.text()).toBe(mockFirewallRule.tenant_id); + }); + + it("displays source ip", () => { + const sourceIpField = wrapper.find('[data-test="firewall-source-ip-field"]'); + expect(sourceIpField.text()).toContain("Source IP:"); + 
expect(sourceIpField.text()).toContain(formatSourceIP(mockFirewallRule.source_ip)); + }); + + it("displays username", () => { + const usernameField = wrapper.find('[data-test="firewall-username-field"]'); + expect(usernameField.text()).toContain("Username:"); + expect(usernameField.text()).toContain(formatUsername(mockFirewallRule.username)); + }); + + it("displays filter information", () => { + const filterField = wrapper.find('[data-test="firewall-filter-field"]'); + expect(filterField.exists()).toBe(true); + expect(filterField.text()).toContain("Filter:"); + }); + }); + + describe("when firewall rule fails to load", () => { + it("shows error snackbar", async () => { + await mountWrapper(createAxiosError(404, "Not Found")); + + expect(mockSnackbar.showError).toHaveBeenCalledWith("Failed to get firewall rule details."); + }); + }); +}); diff --git a/ui/admin/tests/unit/views/FirewallRules.spec.ts b/ui/admin/tests/unit/views/FirewallRules.spec.ts new file mode 100644 index 00000000000..cc593c6033c --- /dev/null +++ b/ui/admin/tests/unit/views/FirewallRules.spec.ts @@ -0,0 +1,34 @@ +import { describe, expect, it, beforeEach, afterEach } from "vitest"; +import { VueWrapper } from "@vue/test-utils"; +import { mountComponent } from "@tests/utils/mount"; +import { createCleanAdminRouter } from "@tests/utils/router"; +import FirewallRules from "@admin/views/FirewallRules.vue"; + +describe("FirewallRules", () => { + let wrapper: VueWrapper>; + + const mountWrapper = async () => { + const router = createCleanAdminRouter(); + await router.push({ name: "firewall-rules" }); + await router.isReady(); + + wrapper = mountComponent(FirewallRules, { global: { plugins: [router] } }); + }; + + beforeEach(() => mountWrapper()); + + afterEach(() => { wrapper?.unmount(); }); + + it("displays the page header with correct title", () => { + expect(wrapper.text()).toContain("Firewall Rules"); + expect(wrapper.text()).toContain("Security Controls"); + }); + + it("displays the page header 
description", () => { + expect(wrapper.text()).toContain("Review every policy applied across namespaces and confirm access is locked down."); + }); + + it("displays the firewall rules list component", () => { + expect(wrapper.find('[data-test="firewall-rules-list"]').exists()).toBe(true); + }); +}); diff --git a/ui/admin/tests/unit/views/NamespaceDetails.spec.ts b/ui/admin/tests/unit/views/NamespaceDetails.spec.ts new file mode 100644 index 00000000000..f0c3cc422f6 --- /dev/null +++ b/ui/admin/tests/unit/views/NamespaceDetails.spec.ts @@ -0,0 +1,164 @@ +import { describe, expect, it, beforeEach, vi } from "vitest"; +import { VueWrapper, flushPromises } from "@vue/test-utils"; +import { mountComponent, mockSnackbar } from "@tests/utils/mount"; +import { createCleanAdminRouter } from "@tests/utils/router"; +import { createAxiosError } from "@tests/utils/axiosError"; +import { formatFullDateTime } from "@/utils/date"; +import useNamespacesStore from "@admin/store/modules/namespaces"; +import NamespaceDetails from "@admin/views/NamespaceDetails.vue"; +import { mockNamespace } from "../mocks"; +import { afterEach } from "vitest"; + +vi.mock("@admin/store/api/namespaces"); + +describe("NamespaceDetails", () => { + let wrapper: VueWrapper>; + + const devicesCount = mockNamespace.devices_accepted_count + + mockNamespace.devices_pending_count + + mockNamespace.devices_rejected_count; + + const mountWrapper = async (mockError?: Error) => { + const router = createCleanAdminRouter(); + await router.push({ name: "namespaceDetails", params: { id: mockNamespace.tenant_id } }); + await router.isReady(); + + wrapper = mountComponent(NamespaceDetails, { + global: { plugins: [router] }, + piniaOptions: { + initialState: { adminNamespaces: mockError ? 
{} : { namespace: mockNamespace } }, + stubActions: !mockError, + }, + }); + + const namespacesStore = useNamespacesStore(); + if (mockError) vi.mocked(namespacesStore.fetchNamespaceById).mockRejectedValueOnce(mockError); + + await flushPromises(); + }; + + afterEach(() => { + vi.clearAllMocks(); + wrapper?.unmount(); + }); + + describe("when namespace loads successfully", () => { + beforeEach(() => mountWrapper()); + + it("displays the title", () => { + expect(wrapper.find("h1").text()).toBe("Namespace Details"); + }); + + it("displays namespace name in card title and field", () => { + expect(wrapper.find(".text-h6").text()).toContain(mockNamespace.name); + const nameField = wrapper.find('[data-test="namespace-name-field"]'); + expect(nameField.text()).toContain("Name:"); + expect(nameField.text()).toContain(mockNamespace.name); + }); + + it("shows type chip with correct value", () => { + const typeChip = wrapper.find('[data-test="namespace-type-chip"]'); + expect(typeChip.exists()).toBe(true); + expect(typeChip.text()).toBe(mockNamespace.type); + }); + + it("displays tenant id", () => { + const tenantField = wrapper.find('[data-test="namespace-tenant-id-field"]'); + expect(tenantField.text()).toContain("Tenant ID:"); + expect(tenantField.text()).toContain(mockNamespace.tenant_id); + }); + + it("displays owner with link", () => { + const ownerField = wrapper.find('[data-test="namespace-owner-field"]'); + expect(ownerField.text()).toContain("Owner:"); + const link = ownerField.find("a"); + expect(link.exists()).toBe(true); + expect(link.text()).toBe(mockNamespace.owner); + }); + + it("displays total devices count", () => { + const devicesField = wrapper.find('[data-test="namespace-devices-field"]'); + expect(devicesField.text()).toContain("Total Devices:"); + expect(devicesField.text()).toContain(String(devicesCount)); + }); + + it("displays devices breakdown with correct counts", () => { + const breakdown = 
wrapper.find('[data-test="namespace-devices-breakdown"]'); + const accepted = breakdown.find('[data-test="namespace-devices-accepted"]'); + expect(accepted.text()).toContain("Accepted:"); + expect(accepted.text()).toContain(String(mockNamespace.devices_accepted_count)); + + const pending = breakdown.find('[data-test="namespace-devices-pending"]'); + expect(pending.text()).toContain("Pending:"); + expect(pending.text()).toContain(String(mockNamespace.devices_pending_count)); + + const rejected = breakdown.find('[data-test="namespace-devices-rejected"]'); + expect(rejected.text()).toContain("Rejected:"); + expect(rejected.text()).toContain(String(mockNamespace.devices_rejected_count)); + }); + + it("displays created at date", () => { + const createdField = wrapper.find('[data-test="namespace-created-field"]'); + expect(createdField.text()).toContain("Created:"); + expect(createdField.text()).toContain(formatFullDateTime(mockNamespace.created_at)); + }); + + it("displays max devices", () => { + const maxDevicesField = wrapper.find('[data-test="namespace-max-devices-field"]'); + expect(maxDevicesField.text()).toContain("Max Devices:"); + expect(maxDevicesField.text()).toContain(String(mockNamespace.max_devices)); + }); + + it("displays session record setting", () => { + const sessionRecordField = wrapper.find('[data-test="namespace-session-record-field"]'); + expect(sessionRecordField.text()).toContain("Session Record:"); + expect(sessionRecordField.text()).toContain("Enabled"); + }); + + it("displays connection announcement", () => { + const announcementField = wrapper.find('[data-test="namespace-connection-announcement-field"]'); + expect(announcementField.text()).toContain("Connection Announcement:"); + expect(announcementField.text()).toContain(mockNamespace.settings.connection_announcement); + }); + + it("displays members section", () => { + const membersSection = wrapper.find('[data-test="namespace-members-section"]'); + 
expect(membersSection.exists()).toBe(true); + expect(membersSection.text()).toContain(`Members (${mockNamespace.members.length})`); + }); + + it("displays members list with all member items", () => { + const membersList = wrapper.find('[data-test="namespace-members-list"]'); + expect(membersList.exists()).toBe(true); + const memberItems = wrapper.findAll('[data-test="namespace-member-item"]'); + expect(memberItems.length).toBe(mockNamespace.members.length); + }); + + it("displays member roles with correct values", () => { + const roles = wrapper.findAll('[data-test="namespace-member-role"]'); + expect(roles.length).toBe(mockNamespace.members.length); + expect(roles[0].text()).toContain("owner"); + }); + + it("displays member ids", () => { + const memberIds = wrapper.findAll('[data-test="namespace-member-id"]'); + expect(memberIds.length).toBe(mockNamespace.members.length); + expect(memberIds[0].text()).toContain(mockNamespace.members[0].id); + }); + + it("displays member added dates", () => { + const addedDates = wrapper.findAll('[data-test="namespace-member-added"]'); + expect(addedDates.length).toBeGreaterThan(0); + expect(addedDates[0].text()).toContain("Added:"); + expect(addedDates[0].text()).toContain(formatFullDateTime(mockNamespace.members[0].added_at)); + }); + }); + + describe("when namespace fails to load", () => { + it("shows error snackbar", async () => { + await mountWrapper(createAxiosError(404, "Not Found")); + + expect(mockSnackbar.showError).toHaveBeenCalledWith("Failed to fetch namespace details."); + }); + }); +}); diff --git a/ui/admin/tests/unit/views/Namespaces.spec.ts b/ui/admin/tests/unit/views/Namespaces.spec.ts new file mode 100644 index 00000000000..79ca047b0f0 --- /dev/null +++ b/ui/admin/tests/unit/views/Namespaces.spec.ts @@ -0,0 +1,113 @@ +import { describe, expect, it, beforeEach, vi, afterEach } from "vitest"; +import { VueWrapper, flushPromises } from "@vue/test-utils"; +import { mountComponent } from "@tests/utils/mount"; +import 
{ createCleanAdminRouter } from "@tests/utils/router"; +import useNamespacesStore from "@admin/store/modules/namespaces"; +import { mockNamespaces } from "../mocks"; +import Namespaces from "@admin/views/Namespaces.vue"; + +vi.mock("@admin/store/api/namespaces"); + +describe("Namespaces", () => { + let wrapper: VueWrapper>; + let namespacesStore: ReturnType; + + const mountWrapper = async (mockError?: Error) => { + const router = createCleanAdminRouter(); + await router.push({ name: "namespaces" }); + await router.isReady(); + + wrapper = mountComponent(Namespaces, { + global: { plugins: [router] }, + piniaOptions: { + initialState: { + adminNamespaces: { + namespaces: mockError ? [] : mockNamespaces, + namespaceCount: mockError ? 0 : mockNamespaces.length, + }, + }, + stubActions: !mockError, + }, + }); + + namespacesStore = useNamespacesStore(); + if (mockError) vi.mocked(namespacesStore.fetchNamespaceList).mockRejectedValueOnce(mockError); + + await flushPromises(); + }; + + afterEach(() => { + vi.clearAllMocks(); + wrapper?.unmount(); + }); + + describe("when namespaces load successfully", () => { + beforeEach(() => mountWrapper()); + + it("displays the page header with correct title", () => { + expect(wrapper.text()).toContain("Namespaces"); + expect(wrapper.text()).toContain("Namespace Management"); + }); + + it("displays the page header description", () => { + expect(wrapper.text()).toContain("Track every tenant, search by name, and export namespace data for audits."); + }); + + it("displays the search input field", () => { + const searchInput = wrapper.find('[data-test="search-input"]'); + expect(searchInput.exists()).toBe(true); + expect(searchInput.text()).toContain("Search by name"); // Placeholder + }); + + it("displays the export namespaces button", () => { + expect(wrapper.find('[data-test="namespaces-export-btn"]').exists()).toBe(true); + }); + + it("displays the namespaces list component", () => { + 
expect(wrapper.find('[data-test="namespaces-list"]').exists()).toBe(true); + }); + }); + + describe("when searching for namespaces", () => { + beforeEach(() => mountWrapper()); + + it("triggers search on keyup event", async () => { + const searchInput = wrapper.find('[data-test="search-input"] input'); + await searchInput.setValue("test-namespace"); + await searchInput.trigger("keyup"); + await flushPromises(); + + expect(namespacesStore.fetchNamespaceList).toHaveBeenCalled(); + }); + + it("encodes filter correctly when searching", async () => { + const searchInput = wrapper.find('[data-test="search-input"] input'); + await searchInput.setValue("dev"); + await searchInput.trigger("keyup"); + await flushPromises(); + + expect(namespacesStore.setFilter).toHaveBeenCalled(); + expect(namespacesStore.fetchNamespaceList).toHaveBeenCalledWith( + expect.objectContaining({ + filter: expect.any(String), + page: 1, + }), + ); + }); + + it("clears filter when search is empty", async () => { + const searchInput = wrapper.find('[data-test="search-input"] input'); + await searchInput.setValue(""); + await searchInput.trigger("keyup"); + await flushPromises(); + + expect(namespacesStore.setFilter).toHaveBeenCalledWith(""); + expect(namespacesStore.fetchNamespaceList).toHaveBeenCalledWith( + expect.objectContaining({ + filter: "", + page: 1, + }), + ); + }); + }); +}); diff --git a/ui/admin/tests/unit/views/NewAnnouncement.spec.ts b/ui/admin/tests/unit/views/NewAnnouncement.spec.ts new file mode 100644 index 00000000000..140e955902c --- /dev/null +++ b/ui/admin/tests/unit/views/NewAnnouncement.spec.ts @@ -0,0 +1,165 @@ +import { describe, expect, it, beforeEach, vi, afterEach } from "vitest"; +import { VueWrapper, flushPromises } from "@vue/test-utils"; +import { mountComponent, mockSnackbar } from "@tests/utils/mount"; +import { createCleanAdminRouter } from "@tests/utils/router"; +import { createAxiosError } from "@tests/utils/axiosError"; +import useAnnouncementStore from 
"@admin/store/modules/announcement"; +import NewAnnouncement from "@admin/views/NewAnnouncement.vue"; + +vi.mock("@admin/store/api/announcement"); + +// Mock TinyMCE Editor to make it testable +vi.mock("@tinymce/tinymce-vue", () => ({ + default: { + name: "Editor", + // eslint-disable-next-line vue/max-len + template: "", + props: ["modelValue", "apiKey", "init", "toolbar", "outputFormat"], + }, +})); + +describe("NewAnnouncement", () => { + let wrapper: VueWrapper>; + let announcementStore: ReturnType; + + const mountWrapper = async () => { + const router = createCleanAdminRouter(); + await router.push({ name: "new-announcement" }); + await router.isReady(); + + wrapper = mountComponent(NewAnnouncement, { global: { plugins: [router] } }); + + announcementStore = useAnnouncementStore(); + await flushPromises(); + }; + + beforeEach(() => mountWrapper()); + + afterEach(() => { + vi.clearAllMocks(); + wrapper?.unmount(); + }); + + it("displays the page header with correct title", () => { + expect(wrapper.text()).toContain("Create new Announcement"); + expect(wrapper.text()).toContain("Platform Messaging"); + }); + + it("displays the page header description", () => { + expect(wrapper.text()).toContain("Compose a system-wide update to keep every namespace informed about critical changes."); + }); + + it("displays the title input field", () => { + const titleInput = wrapper.find('[data-test="announcement-title-field"] input'); + expect(titleInput.exists()).toBe(true); + expect(wrapper.text()).toContain("Title"); + }); + + it("displays the content editor", () => { + expect(wrapper.find('[data-test="announcement-content"]').exists()).toBe(true); + expect(wrapper.text()).toContain("Content"); + }); + + it("displays the post button", () => { + const postBtn = wrapper.find('[data-test="announcement-btn-post"]'); + expect(postBtn.exists()).toBe(true); + expect(postBtn.text()).toBe("Post"); + }); + + describe("when posting announcement", () => { + it("shows error when title is 
empty", async () => { + const postBtn = wrapper.find('[data-test="announcement-btn-post"]'); + await postBtn.trigger("click"); + await flushPromises(); + + const titleInput = wrapper.find('[data-test="announcement-title-field"]'); + expect(titleInput.text()).toContain("Title cannot be empty!"); + }); + + it("shows error when content is empty", async () => { + const titleInput = wrapper.find('[data-test="announcement-title-field"] input'); + await titleInput.setValue("Test Title"); + await flushPromises(); + + const postBtn = wrapper.find('[data-test="announcement-btn-post"]'); + await postBtn.trigger("click"); + await flushPromises(); + + const errorAlert = wrapper.find('[data-test="announcement-error"]'); + expect(errorAlert.exists()).toBe(true); + expect(errorAlert.text()).toContain("The announcement cannot be empty!"); + expect(mockSnackbar.showError).toHaveBeenCalledWith("Failed to create announcement."); + }); + + it("creates announcement successfully with valid data", async () => { + const titleInput = wrapper.find('[data-test="announcement-title-field"] input'); + await titleInput.setValue("Important Update"); + await flushPromises(); + + const contentEditor = wrapper.find('[data-test="announcement-content"]'); + await contentEditor.setValue("

This is the announcement content

"); + await contentEditor.trigger("input"); + await flushPromises(); + + vi.mocked(announcementStore.createAnnouncement).mockResolvedValueOnce(undefined); + + const postBtn = wrapper.find('[data-test="announcement-btn-post"]'); + await postBtn.trigger("click"); + await flushPromises(); + + expect(announcementStore.createAnnouncement).toHaveBeenCalledWith( + expect.objectContaining({ + title: "Important Update", + content: expect.any(String), + }), + ); + expect(mockSnackbar.showSuccess).toHaveBeenCalledWith("Successfully created announcement."); + }); + + it("shows error when API call fails", async () => { + const titleInput = wrapper.find('[data-test="announcement-title-field"] input'); + await titleInput.setValue("Failed Announcement"); + await flushPromises(); + + const contentEditor = wrapper.find('[data-test="announcement-content"]'); + await contentEditor.setValue("

Content

"); + await contentEditor.trigger("input"); + await flushPromises(); + + vi.mocked(announcementStore.createAnnouncement).mockRejectedValueOnce( + createAxiosError(500, "Server Error"), + ); + + const postBtn = wrapper.find('[data-test="announcement-btn-post"]'); + await postBtn.trigger("click"); + await flushPromises(); + + expect(mockSnackbar.showError).toHaveBeenCalledWith("Failed to create announcement."); + }); + }); + + describe("when content changes", () => { + it("clears announcement error when content is added", async () => { + const titleInput = wrapper.find('[data-test="announcement-title-field"] input'); + await titleInput.setValue("Test Title"); + await flushPromises(); + + // Trigger error first by posting without content + const postBtn = wrapper.find('[data-test="announcement-btn-post"]'); + await postBtn.trigger("click"); + await flushPromises(); + + let errorAlert = wrapper.find('[data-test="announcement-error"]'); + expect(errorAlert.exists()).toBe(true); + + // Add content to clear the error + const contentEditor = wrapper.find('[data-test="announcement-content"]'); + await contentEditor.setValue("

New content

"); + await contentEditor.trigger("input"); + await flushPromises(); + + errorAlert = wrapper.find('[data-test="announcement-error"]'); + expect(errorAlert.exists()).toBe(false); + }); + }); +}); diff --git a/ui/admin/tests/unit/views/SessionDetails.spec.ts b/ui/admin/tests/unit/views/SessionDetails.spec.ts new file mode 100644 index 00000000000..7ccb5f3e32d --- /dev/null +++ b/ui/admin/tests/unit/views/SessionDetails.spec.ts @@ -0,0 +1,112 @@ +import { describe, expect, it, beforeEach, vi } from "vitest"; +import { VueWrapper, flushPromises } from "@vue/test-utils"; +import { mountComponent, mockSnackbar } from "@tests/utils/mount"; +import { createCleanAdminRouter } from "@tests/utils/router"; +import { createAxiosError } from "@tests/utils/axiosError"; +import useSessionsStore from "@admin/store/modules/sessions"; +import { mockSession } from "../mocks"; +import SessionDetails from "@admin/views/SessionDetails.vue"; +import { formatFullDateTime } from "@/utils/date"; +import { afterEach } from "vitest"; + +vi.mock("@admin/store/api/sessions"); + +describe("SessionDetails", () => { + let wrapper: VueWrapper>; + + const mountWrapper = async (session = mockSession, mockError?: Error) => { + const router = createCleanAdminRouter(); + await router.push({ name: "sessionDetails", params: { id: session.uid } }); + await router.isReady(); + + wrapper = mountComponent(SessionDetails, { + global: { plugins: [router] }, + piniaOptions: { + initialState: { adminSessions: mockError ? 
{} : { session } }, + stubActions: !mockError, + }, + }); + + const sessionsStore = useSessionsStore(); + if (mockError) vi.mocked(sessionsStore.fetchSessionById).mockRejectedValueOnce(mockError); + + await flushPromises(); + }; + + afterEach(() => { + vi.clearAllMocks(); + wrapper?.unmount(); + }); + + describe("when session loads successfully", () => { + beforeEach(() => mountWrapper()); + + it("displays the session uid in the card title", () => { + expect(wrapper.find(".text-h6").text()).toBe(mockSession.uid); + }); + + it("displays session uid field", () => { + const uidField = wrapper.find('[data-test="session-uid-field"]'); + expect(uidField.text()).toContain("UID:"); + expect(uidField.text()).toContain(mockSession.uid); + }); + + it("displays device field with link", () => { + const deviceField = wrapper.find('[data-test="session-device-field"]'); + expect(deviceField.text()).toContain("Device:"); + const link = deviceField.find("a"); + expect(link.exists()).toBe(true); + expect(link.text()).toBe(mockSession.device.name); + }); + + it("displays username field", () => { + const usernameField = wrapper.find('[data-test="session-username-field"]'); + expect(usernameField.text()).toContain("Username:"); + expect(usernameField.text()).toContain(mockSession.username); + }); + + it("displays ip address field", () => { + const ipField = wrapper.find('[data-test="session-ip-field"]'); + expect(ipField.text()).toContain("IP Address:"); + expect(ipField.text()).toContain(mockSession.ip_address); + }); + + it("displays session type field", () => { + const typeField = wrapper.find('[data-test="session-type-field"]'); + expect(typeField.text()).toContain("Type:"); + expect(typeField.text()).toContain(mockSession.type); + }); + + it("displays started at field", () => { + const startedField = wrapper.find('[data-test="session-started-field"]'); + expect(startedField.text()).toContain("Started At:"); + 
expect(startedField.text()).toContain(formatFullDateTime(mockSession.started_at)); + }); + + it("displays last seen field", () => { + const lastSeenField = wrapper.find('[data-test="session-last-seen-field"]'); + expect(lastSeenField.text()).toContain("Last Seen:"); + expect(lastSeenField.text()).toContain(formatFullDateTime(mockSession.last_seen)); + }); + + it("displays authenticated status field", () => { + const authenticatedField = wrapper.find('[data-test="session-authenticated-field"]'); + expect(authenticatedField.text()).toContain("Authenticated:"); + expect(authenticatedField.text()).toContain(mockSession.authenticated ? "Yes" : "No"); + }); + + it("displays active status icon", () => { + const activeIcon = wrapper.find('[data-test="active-icon"]'); + expect(activeIcon.exists()).toBe(true); + expect(activeIcon.classes()).toContain("text-success"); + }); + }); + + describe("when session fails to load", () => { + it("shows error snackbar", async () => { + await mountWrapper(mockSession, createAxiosError(404, "Not Found")); + + expect(mockSnackbar.showError).toHaveBeenCalledWith("Failed to get session details."); + }); + }); +}); diff --git a/ui/admin/tests/unit/views/Sessions.spec.ts b/ui/admin/tests/unit/views/Sessions.spec.ts new file mode 100644 index 00000000000..1ad899f1b01 --- /dev/null +++ b/ui/admin/tests/unit/views/Sessions.spec.ts @@ -0,0 +1,31 @@ +import { describe, expect, it, beforeEach, afterEach } from "vitest"; +import { VueWrapper } from "@vue/test-utils"; +import { mountComponent } from "@tests/utils/mount"; +import { createCleanAdminRouter } from "@tests/utils/router"; +import Sessions from "@admin/views/Sessions.vue"; + +describe("Sessions", () => { + let wrapper: VueWrapper>; + let router: ReturnType; + + beforeEach(async () => { + router = createCleanAdminRouter(); + await router.push({ name: "sessions" }); + await router.isReady(); + + wrapper = mountComponent(Sessions, { global: { plugins: [router] } }); + }); + + afterEach(() => { 
wrapper?.unmount(); }); + + it("displays the page header with correct title", () => { + expect(wrapper.text()).toContain("Sessions"); + expect(wrapper.text()).toContain("Activity Monitoring"); + }); + + it("renders the session list component", () => { + const list = wrapper.find('[data-test="session-list"]'); + const emptyState = wrapper.find('[data-test="sessions-empty-state"]'); + expect(list.exists() || emptyState.exists()).toBe(true); + }); +}); diff --git a/ui/admin/tests/unit/views/Unauthorized.spec.ts b/ui/admin/tests/unit/views/Unauthorized.spec.ts new file mode 100644 index 00000000000..45f7fb02ce5 --- /dev/null +++ b/ui/admin/tests/unit/views/Unauthorized.spec.ts @@ -0,0 +1,72 @@ +import { describe, expect, it, beforeEach, afterEach, vi } from "vitest"; +import { VueWrapper } from "@vue/test-utils"; +import { mountComponent } from "@tests/utils/mount"; +import useAuthStore from "@admin/store/modules/auth"; +import Unauthorized from "@admin/views/Unauthorized.vue"; + +describe("Unauthorized", () => { + let wrapper: VueWrapper>; + let authStore: ReturnType; + + beforeEach(() => { + wrapper = mountComponent(Unauthorized); + authStore = useAuthStore(); + }); + + afterEach(() => { + wrapper?.unmount(); + vi.clearAllMocks(); + }); + + it("displays the access denied heading", () => { + expect(wrapper.find("h1").text()).toBe("Admin Access Required"); + }); + + it("displays the what you can do section", () => { + expect(wrapper.find("h2").text()).toBe("What you can do:"); + }); + + it("displays all action items", () => { + const actionItems = [ + "Return to the main ShellHub application", + "Contact your system administrator for admin access", + "Manage your devices, sessions, and namespaces in the main app", + ]; + + const listItems = wrapper.findAll(".v-list-item-title"); + expect(listItems).toHaveLength(actionItems.length); + + actionItems.forEach((item, index) => { + expect(listItems[index].text()).toBe(item); + }); + }); + + it("displays the info alert", () 
=> { + const alert = wrapper.find(".v-alert"); + expect(alert.exists()).toBe(true); + expect(alert.text()).toContain("If you believe you should have admin access"); + }); + + it("displays logout and go to main app buttons", () => { + const buttons = wrapper.findAll(".v-btn"); + expect(buttons).toHaveLength(2); + + expect(buttons[0].text()).toContain("Logout"); + expect(buttons[1].text()).toContain("Go to ShellHub"); + }); + + it("calls logout and redirects when logout button is clicked", async () => { + const logoutButton = wrapper.findAll(".v-btn")[0]; + await logoutButton.trigger("click"); + + expect(authStore.logout).toHaveBeenCalled(); + expect(window.location.href).toBe("/login"); + }); + + it("redirects to main app when go to shellhub button is clicked", async () => { + const goToMainAppButton = wrapper.findAll(".v-btn")[1]; + await goToMainAppButton.trigger("click"); + + expect(window.location.href).toBe("/"); + }); +}); diff --git a/ui/admin/tests/unit/views/UserDetails.spec.ts b/ui/admin/tests/unit/views/UserDetails.spec.ts new file mode 100644 index 00000000000..9d8f10fecbf --- /dev/null +++ b/ui/admin/tests/unit/views/UserDetails.spec.ts @@ -0,0 +1,125 @@ +import { describe, expect, it, beforeEach, vi, afterEach } from "vitest"; +import { VueWrapper, flushPromises } from "@vue/test-utils"; +import { mountComponent, mockSnackbar } from "@tests/utils/mount"; +import { createCleanAdminRouter } from "@tests/utils/router"; +import { createAxiosError } from "@tests/utils/axiosError"; +import { formatFullDateTime } from "@/utils/date"; +import useUsersStore from "@admin/store/modules/users"; +import UserDetails from "@admin/views/UserDetails.vue"; +import { mockUser } from "../mocks"; + +vi.mock("@admin/store/api/users"); + +describe("UserDetails", () => { + let wrapper: VueWrapper>; + + const mountWrapper = async (mockError?: Error) => { + const router = createCleanAdminRouter(); + await router.push({ name: "userDetails", params: { id: mockUser.id } }); + 
await router.isReady(); + + wrapper = mountComponent(UserDetails, { + global: { plugins: [router] }, + piniaOptions: { + initialState: { adminUsers: mockError ? {} : { user: mockUser } }, + stubActions: !mockError, + }, + }); + + const usersStore = useUsersStore(); + if (mockError) vi.mocked(usersStore.fetchUserById).mockRejectedValueOnce(mockError); + + await flushPromises(); + }; + + afterEach(() => { + vi.clearAllMocks(); + wrapper?.unmount(); + }); + + describe("when user loads successfully", () => { + beforeEach(() => mountWrapper()); + + it("displays the title", () => { + expect(wrapper.find("h1").text()).toBe("User Details"); + }); + + it("displays the username in the card title", () => { + expect(wrapper.find(".text-h6").text()).toContain(mockUser.username); + }); + + it("displays admin chip when user is admin", () => { + const chip = wrapper.find('[data-test="user-admin-chip"]'); + expect(chip.exists()).toBe(true); + expect(chip.text()).toContain("Admin"); + }); + + it("displays user uid", () => { + const uidField = wrapper.find('[data-test="user-uid-field"]'); + expect(uidField.text()).toContain("UID:"); + expect(uidField.text()).toContain(mockUser.id); + }); + + it("displays user name", () => { + const nameField = wrapper.find('[data-test="user-name-field"]'); + expect(nameField.text()).toContain("Name:"); + expect(nameField.text()).toContain(mockUser.name); + }); + + it("displays username", () => { + const usernameField = wrapper.find('[data-test="user-username-field"]'); + expect(usernameField.text()).toContain("Username:"); + expect(usernameField.text()).toContain(mockUser.username); + }); + + it("displays email", () => { + const emailField = wrapper.find('[data-test="user-email-field"]'); + expect(emailField.text()).toContain("Email:"); + expect(emailField.text()).toContain(mockUser.email); + }); + + it("displays status", () => { + const statusField = wrapper.find('[data-test="user-status-field"]'); + expect(statusField.text()).toContain("Status:"); 
+ }); + + it("displays created at date", () => { + const createdField = wrapper.find('[data-test="user-created-field"]'); + expect(createdField.text()).toContain("Created:"); + expect(createdField.text()).toContain(formatFullDateTime(mockUser.created_at)); + }); + + it("displays last login field", () => { + const lastLoginField = wrapper.find('[data-test="user-last-login-field"]'); + expect(lastLoginField.text()).toContain("Last Login:"); + }); + + it("displays mfa status", () => { + const row = wrapper.find('[data-test="user-mfa-marketing-row"]'); + expect(row.exists()).toBe(true); + expect(row.text()).toContain("MFA:"); + }); + + it("displays auth methods", () => { + const authField = wrapper.find('[data-test="user-auth-methods-field"]'); + expect(authField.text()).toContain("Auth Methods:"); + expect(authField.text()).toContain("local"); + }); + + it("displays namespace counters", () => { + const row = wrapper.find('[data-test="user-max-namespace-row"]'); + expect(row.text()).toContain("Max Namespaces:"); + expect(row.text()).toContain(String(mockUser.max_namespaces)); + expect(row.text()).toContain("Namespaces Owned:"); + expect(row.text()).toContain(String(mockUser.namespacesOwned)); + }); + }); + + describe("when user fails to load", () => { + it("shows error snackbar", async () => { + await mountWrapper(createAxiosError(404, "Not Found")); + + expect(mockSnackbar.showError).toHaveBeenCalledWith("Failed to get user details."); + }); + }); +}); diff --git a/ui/admin/tests/unit/views/Users.spec.ts b/ui/admin/tests/unit/views/Users.spec.ts new file mode 100644 index 00000000000..4be5b8d1709 --- /dev/null +++ b/ui/admin/tests/unit/views/Users.spec.ts @@ -0,0 +1,46 @@ +import { describe, expect, it, beforeEach, afterEach } from "vitest"; +import { VueWrapper } from "@vue/test-utils"; +import { mountComponent } from "@tests/utils/mount"; +import { createCleanAdminRouter } from "@tests/utils/router"; +import Users from "@admin/views/Users.vue"; + +describe("Users", 
() => { + let wrapper: VueWrapper>; + let router: ReturnType; + + beforeEach(async () => { + router = createCleanAdminRouter(); + await router.push({ name: "users" }); + await router.isReady(); + + wrapper = mountComponent(Users, { global: { plugins: [router] } }); + }); + + afterEach(() => { + wrapper?.unmount(); + }); + + it("displays the page header with correct title", () => { + expect(wrapper.text()).toContain("Users"); + expect(wrapper.text()).toContain("Account Management"); + }); + + it("displays the search input field", () => { + const searchInput = wrapper.find('input[type="text"]'); + expect(searchInput.exists()).toBe(true); + }); + + it("displays the export users button", () => { + expect(wrapper.find('[data-test="users-export-btn"]').exists()).toBe(true); + }); + + it("displays the add user button", () => { + expect(wrapper.find('[data-test="user-add-btn"]').exists()).toBe(true); + }); + + it("displays the users list component", () => { + const list = wrapper.find('[data-test="users-list"]'); + const emptyState = wrapper.find('[data-test="users-empty-state"]'); + expect(list.exists() || emptyState.exists()).toBe(true); + }); +}); diff --git a/ui/eslint.config.js b/ui/eslint.config.js new file mode 100644 index 00000000000..aeb7f91e599 --- /dev/null +++ b/ui/eslint.config.js @@ -0,0 +1,128 @@ +import { defineConfig, globalIgnores } from "eslint/config"; +import js from "@eslint/js"; +import typescriptEslint from "typescript-eslint"; +import importPlugin from "eslint-plugin-import"; +import jestPlugin from "eslint-plugin-jest"; +import vuePlugin from "eslint-plugin-vue"; +import prettierPlugin from "eslint-plugin-prettier"; +import vueParser from "vue-eslint-parser"; +import typeScriptParser from "@typescript-eslint/parser"; +import stylisticPlugin from '@stylistic/eslint-plugin' +import globals from "globals"; + +export default defineConfig([ + globalIgnores(["eslint.config.js", "dist/", "node_modules/", "coverage/", "src/api/client/*.ts"]), + + { 
files: ["*.js", "*.cjs", "*.mjs", "*.ts", "*.vue"], }, + + js.configs.recommended, + typescriptEslint.configs.eslintRecommended, + typescriptEslint.configs.recommended, + typescriptEslint.configs.recommendedTypeChecked, + ...vuePlugin.configs["flat/recommended"], + stylisticPlugin.configs.recommended, + + { + languageOptions: { + parser: vueParser, + parserOptions: { + parser: typeScriptParser, + extraFileExtensions: [".vue"], + sourceType: "module", + ecmaVersion: 2020, + projectService: true, + }, + globals: { + ...globals.node, + ...globals.browser, + ...globals.jest, + }, + }, + + plugins: { + import: importPlugin, + vue: vuePlugin, + prettier: prettierPlugin, + "@stylistic": stylisticPlugin, + }, + + rules: { + "no-console": process.env.NODE_ENV === "production" ? "warn" : "off", + "no-debugger": process.env.NODE_ENV === "production" ? "warn" : "off", + "global-require": "off", + "no-cond-assign": ["error"], + "no-constant-condition": ["error"], + "no-empty-pattern": ["error"], + "no-redeclare": ["error"], + "no-delete-var": ["error"], + "no-var": ["error"], + "no-plusplus": "off", + "no-shadow": "off", + "no-confusing-arrow": "off", + "object-curly-newline": "off", + + "import/no-unresolved": "off", + "import/no-extraneous-dependencies": ["error", { peerDependencies: true }], + "import/no-cycle": ["off", { ignoreExternal: true }], + "import/extensions": "off", + "import/no-useless-path-segments": "off", + + "vue/max-len": ["error", { code: 140, template: 140 }], + "vue/multi-word-component-names": "off", + "vue/no-v-html": "off", + "vue/no-template-shadow": "off", + "vue/singleline-html-element-content-newline": "off", + + "spaced-comment": [ + "error", + "always", + { + exceptions: ["////"], + markers: ["/"], + }, + ], + + "@typescript-eslint/no-shadow": "off", + "@typescript-eslint/no-use-before-define": "off", + "@typescript-eslint/prefer-readonly-parameter-types": "off", + "@typescript-eslint/no-unused-vars": [ + "error", + { + argsIgnorePattern: "^_", + 
varsIgnorePattern: "^_", + caughtErrorsIgnorePattern: "^_", + }, + ], + + "@stylistic/quotes": ["error", "double", { avoidEscape: true }], + "@stylistic/indent": ["error", 2], + "@stylistic/semi": ["error", "always"], + "@stylistic/brace-style": ["error", "1tbs", { allowSingleLine: true }], + "@stylistic/max-statements-per-line": "off", + "@stylistic/member-delimiter-style": "off", + "@stylistic/arrow-parens": ["error", "always"], + "@stylistic/quote-props": ["error", "as-needed"], + }, + }, + + // Tests override + { + files: ["**/*.spec.ts", "**/__tests__/*.{j,t}s?(x)", "**/tests/unit/**/*.spec.{j,t}s?(x)"], + plugins: { + jest: jestPlugin, + }, + languageOptions: { + globals: { + ...globals.jest, + }, + }, + rules: { + "@typescript-eslint/unbound-method": "off", + "@typescript-eslint/no-unsafe-call": "off", + "@typescript-eslint/no-unsafe-member-access": "off", + "@typescript-eslint/no-unsafe-assignment": "off", + "jest/unbound-method": "error", + "jest/no-focused-tests": "error", + }, + }, +]); diff --git a/ui/index.html b/ui/index.html index cb0df6eae7c..a07d71d06fa 100755 --- a/ui/index.html +++ b/ui/index.html @@ -3,18 +3,17 @@ - + - ShellHub
+ - diff --git a/ui/nginx.conf b/ui/nginx.conf index 731c6553cd9..9604b0b7882 100644 --- a/ui/nginx.conf +++ b/ui/nginx.conf @@ -7,6 +7,11 @@ server { try_files $uri $uri/ /index.html; } + location ^~ /admin { + root /usr/share/nginx/html; + try_files $uri $uri/ /admin/index.html; + } + error_page 500 502 503 504 /50x.html; location = /50x.html { diff --git a/ui/package-lock.json b/ui/package-lock.json index 2c13feb228a..7b3debdb374 100644 --- a/ui/package-lock.json +++ b/ui/package-lock.json @@ -9,347 +9,311 @@ "version": "0.0.0", "dependencies": { "@esbuild-plugins/node-globals-polyfill": "^0.2.3", - "@fortawesome/fontawesome-free": "^6.4.0", - "@fortawesome/fontawesome-svg-core": "^6.4.0", - "@fortawesome/free-brands-svg-icons": "^6.4.0", - "@fortawesome/free-regular-svg-icons": "^6.4.0", - "@fortawesome/free-solid-svg-icons": "^6.4.0", - "@mdi/font": "7.2.96", + "@fontsource/anonymous-pro": "^5.2.8", + "@fontsource/fira-code": "^5.2.7", + "@fontsource/inconsolata": "^5.2.8", + "@fontsource/jetbrains-mono": "^5.2.8", + "@fontsource/noto-mono": "^5.2.5", + "@fontsource/source-code-pro": "^5.2.7", + "@fontsource/ubuntu-mono": "^5.2.8", + "@fortawesome/fontawesome-free": "^7.0.0", + "@fortawesome/fontawesome-svg-core": "^7.0.0", + "@fortawesome/free-brands-svg-icons": "^7.0.0", + "@fortawesome/free-regular-svg-icons": "^7.0.0", + "@fortawesome/free-solid-svg-icons": "^7.0.0", + "@mdi/font": "^7.4.47", + "@productdevbook/chatwoot": "github:shellhub-io/chatwoot#build", "@rollup/plugin-inject": "^5.0.3", "@rushstack/eslint-patch": "^1.2.0", "@sentry/tracing": "^7.52.1", "@sentry/vue": "^7.91.0", "@stripe/stripe-js": "^1.38.1", - "@vue/eslint-config-airbnb": "^7.0.0", - "@vue/eslint-config-typescript": "^11.0.2", + "@tinymce/tinymce-vue": "^6.1.0", "@vue/runtime-dom": "^3.2.40", - "@vueuse/core": "^8.2.0", + "@vueuse/core": "^13.0.0", + "@xterm/addon-attach": "^0.11.0", + "@xterm/addon-fit": "^0.10.0", + "@xterm/xterm": "^5.5.0", "assert": "^2.0.0", "axios": "^1.4.0", 
"dotenv": "^16.0.3", "express": "^4.18.1", + "file-saver": "^2.0.5", "font-logos": "^1.0.0", - "gitter-sidecar": "^1.5.0", - "markdown-it": "^13.0.1", + "markdown-it": "^14.0.0", "moment": "^2.29.4", "node-rsa": "^1.1.1", + "pinia": "^3.0.1", "qrcode.vue": "^3.4.1", - "sass": "^1.55.0", + "sass": "^1.83.0", "sshpk": "^1.17.0", - "unplugin-vue-markdown": "^0.25.2", + "turndown": "^7.2.0", + "unplugin-vue-markdown": "^29.0.0", "vee-validate": "^4.12.4", "vite-node": "^1.2.0", - "vue": "^3.4.5", - "vue-gtag": "^2.0.1", + "vue": "^3.5.13", + "vue-gtag": "^3.0.0", "vue-router": "4", "vuetify": "^3.4.9", - "vuex": "^4.0.2", "webcrypto": "^0.1.1", - "webfontloader": "^1.6.28", - "xterm": "^5.0.0", - "xterm-addon-attach": "^0.8.0", - "xterm-addon-fit": "^0.7.0" + "webfontloader": "^1.6.28" }, "devDependencies": { + "@pinia/testing": "^1.0.3", "@rollup/plugin-node-resolve": "^15.0.1", + "@stylistic/eslint-plugin": "^5.5.0", "@types/node": "^18.8.1", "@types/node-rsa": "^1.1.1", "@types/sshpk": "^1.17.0", "@types/webfontloader": "^1.6.35", - "@typescript-eslint/eslint-plugin": "^5.39.0", - "@typescript-eslint/parser": "^5.39.0", "@vitejs/plugin-vue": "^5.0.3", - "@vitest/coverage-c8": "^0.31.0", - "@vitest/ui": "^0.28.2", - "@vue/test-utils": "^2.1.0", + "@vitest/coverage-v8": "^2.1.8", + "@vitest/ui": "^2.1.8", + "@vue/eslint-config-typescript": "^14.6.0", + "@vue/test-utils": "^2.4.6", + "asciinema-player": "^3.9.0", "axios-mock-adapter": "^1.21.4", "buffer": "^6.0.3", - "eslint": "^8.24.0", - "eslint-plugin-vue": "^9.6.0", + "eslint": "^9.38.0", + "eslint-plugin-import": "^2.32.0", + "eslint-plugin-jest": "^29.0.1", + "eslint-plugin-prettier": "^5.5.4", + "eslint-plugin-vue": "^9.33.0", "events": "^3.3.0", - "jsdom": "^21.0.0", + "jsdom": "^26.0.0", "process": "0.11.10", "resize-observer-polyfill": "^1.5.1", "rollup-plugin-polyfill-node": "^0.12.0", "terser": "^5.15.0", "typescript": "^5.0.4", + "typescript-eslint": "^8.46.2", "util": "^0.12.4", "vi-canvas-mock": 
"^1.0.0", "vite": "^5.0.11", + "vite-plugin-node-polyfills": "^0.23.0", "vite-plugin-require-transform": "^1.0.21", "vite-plugin-vuetify": "^2.0.1", - "vitest": "^0.31.0", - "vue-cli-plugin-vuetify": "~2.5.8", + "vitest": "^2.1.8", + "vue-cli-plugin-vuetify": "^2.5.8", + "vue-component-type-helpers": "^3.2.2", + "vue-eslint-parser": "^10.2.0", "vue-stripe-js": "^1.0.1", - "vue-tsc": "^1.2.0", + "vue-tsc": "^3.0.0", "yup": "^1.0.0" } }, "node_modules/@ampproject/remapping": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.1.tgz", - "integrity": "sha512-lFMjJTrFL3j7L9yBxwYfCq2k6qqwHyzuUl/XBnif78PWTJYyL/dfowQHWE3sp6U6ZzqWiiIZnpTMO96zhkjwtg==", - "dev": true, + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", + "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", "dependencies": { - "@jridgewell/gen-mapping": "^0.3.0", - "@jridgewell/trace-mapping": "^0.3.9" + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" }, "engines": { "node": ">=6.0.0" } }, - "node_modules/@babel/code-frame": { - "version": "7.22.13", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.22.13.tgz", - "integrity": "sha512-XktuhWlJ5g+3TJXc5upd9Ks1HutSArik6jf2eAjYFyIOf4ej3RN+184cZbzDvbPnuTJIUhPKKJE3cIsYTiAT3w==", + "node_modules/@asamuzakjp/css-color": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@asamuzakjp/css-color/-/css-color-3.1.1.tgz", + "integrity": "sha512-hpRD68SV2OMcZCsrbdkccTw5FXjNDLo5OuqSHyHZfwweGsDWZwDJ2+gONyNAbazZclobMirACLw0lk8WVxIqxA==", "dev": true, "dependencies": { - "@babel/highlight": "^7.22.13", - "chalk": "^2.4.2" - }, - "engines": { - "node": ">=6.9.0" + "@csstools/css-calc": "^2.1.2", + "@csstools/css-color-parser": "^3.0.8", + "@csstools/css-parser-algorithms": "^3.0.4", + "@csstools/css-tokenizer": "^3.0.3", + "lru-cache": "^10.4.3" 
} }, - "node_modules/@babel/code-frame/node_modules/ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dev": true, - "dependencies": { - "color-convert": "^1.9.0" - }, - "engines": { - "node": ">=4" - } + "node_modules/@asamuzakjp/css-color/node_modules/lru-cache": { + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", + "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", + "dev": true }, - "node_modules/@babel/code-frame/node_modules/chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dev": true, + "node_modules/@babel/code-frame": { + "version": "7.26.2", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz", + "integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==", "dependencies": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" + "@babel/helper-validator-identifier": "^7.25.9", + "js-tokens": "^4.0.0", + "picocolors": "^1.0.0" }, "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/code-frame/node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dev": true, - "dependencies": { - "color-name": "1.1.3" + "node": ">=6.9.0" } }, - "node_modules/@babel/code-frame/node_modules/color-name": { - "version": "1.1.3", - "resolved": 
"https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", - "dev": true - }, - "node_modules/@babel/code-frame/node_modules/escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", - "dev": true, + "node_modules/@babel/compat-data": { + "version": "7.26.8", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.26.8.tgz", + "integrity": "sha512-oH5UPLMWR3L2wEFLnFJ1TZXqHufiTKAiLfqw5zkhS4dKXLJ10yVztfil/twG8EDTA4F/tvVNw9nOl4ZMslB8rQ==", "engines": { - "node": ">=0.8.0" + "node": ">=6.9.0" } }, - "node_modules/@babel/code-frame/node_modules/has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", - "dev": true, + "node_modules/@babel/core": { + "version": "7.26.8", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.26.8.tgz", + "integrity": "sha512-l+lkXCHS6tQEc5oUpK28xBOZ6+HwaH7YwoYQbLFiYb4nS2/l1tKnZEtEWkD0GuiYdvArf9qBS0XlQGXzPMsNqQ==", + "dependencies": { + "@ampproject/remapping": "^2.2.0", + "@babel/code-frame": "^7.26.2", + "@babel/generator": "^7.26.8", + "@babel/helper-compilation-targets": "^7.26.5", + "@babel/helper-module-transforms": "^7.26.0", + "@babel/helpers": "^7.26.7", + "@babel/parser": "^7.26.8", + "@babel/template": "^7.26.8", + "@babel/traverse": "^7.26.8", + "@babel/types": "^7.26.8", + "@types/gensync": "^1.0.0", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, "engines": { - "node": ">=4" + "node": ">=6.9.0" + }, + "funding": { + "type": 
"opencollective", + "url": "https://opencollective.com/babel" } }, - "node_modules/@babel/code-frame/node_modules/supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dev": true, - "dependencies": { - "has-flag": "^3.0.0" + "node_modules/@babel/core/node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "bin": { + "json5": "lib/cli.js" }, "engines": { - "node": ">=4" + "node": ">=6" + } + }, + "node_modules/@babel/core/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "bin": { + "semver": "bin/semver.js" } }, "node_modules/@babel/generator": { - "version": "7.23.0", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.23.0.tgz", - "integrity": "sha512-lN85QRR+5IbYrMWM6Y4pE/noaQtg4pNiqeNGX60eqOfo6gtEj6uw/JagelB8vVztSd7R6M5n1+PQkDbHbBRU4g==", - "dev": true, + "version": "7.26.8", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.26.8.tgz", + "integrity": "sha512-ef383X5++iZHWAXX0SXQR6ZyQhw/0KtTkrTz61WXRhFM6dhpHulO/RJz79L8S6ugZHJkOOkUrUdxgdF2YiPFnA==", "dependencies": { - "@babel/types": "^7.23.0", - "@jridgewell/gen-mapping": "^0.3.2", - "@jridgewell/trace-mapping": "^0.3.17", - "jsesc": "^2.5.1" + "@babel/parser": "^7.26.8", + "@babel/types": "^7.26.8", + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.25", + "jsesc": "^3.0.2" }, "engines": { "node": ">=6.9.0" } }, - "node_modules/@babel/helper-environment-visitor": { - "version": "7.22.20", - "resolved": 
"https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz", - "integrity": "sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA==", - "dev": true, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-function-name": { - "version": "7.23.0", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.23.0.tgz", - "integrity": "sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw==", - "dev": true, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.26.5", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.26.5.tgz", + "integrity": "sha512-IXuyn5EkouFJscIDuFF5EsiSolseme1s0CZB+QxVugqJLYmKdxI1VfIBOst0SUu4rnk2Z7kqTwmoO1lp3HIfnA==", "dependencies": { - "@babel/template": "^7.22.15", - "@babel/types": "^7.23.0" + "@babel/compat-data": "^7.26.5", + "@babel/helper-validator-option": "^7.25.9", + "browserslist": "^4.24.0", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" }, "engines": { "node": ">=6.9.0" } }, - "node_modules/@babel/helper-hoist-variables": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz", - "integrity": "sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw==", - "dev": true, + "node_modules/@babel/helper-compilation-targets/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.25.9.tgz", + 
"integrity": "sha512-tnUA4RsrmflIM6W6RFTLFSXITtl0wKjgpnLgXyowocVPrbYrLUXSBXDgTs8BlbmIzIdlBySRQjINYs2BAkiLtw==", "dependencies": { - "@babel/types": "^7.22.5" + "@babel/traverse": "^7.25.9", + "@babel/types": "^7.25.9" }, "engines": { "node": ">=6.9.0" } }, - "node_modules/@babel/helper-split-export-declaration": { - "version": "7.22.6", - "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.6.tgz", - "integrity": "sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g==", - "dev": true, + "node_modules/@babel/helper-module-transforms": { + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.26.0.tgz", + "integrity": "sha512-xO+xu6B5K2czEnQye6BHA7DolFFmS3LB7stHZFaOLb1pAwO1HWLS8fXA+eh0A2yIvltPVmx3eNNDBJA2SLHXFw==", "dependencies": { - "@babel/types": "^7.22.5" + "@babel/helper-module-imports": "^7.25.9", + "@babel/helper-validator-identifier": "^7.25.9", + "@babel/traverse": "^7.25.9" }, "engines": { "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" } }, "node_modules/@babel/helper-string-parser": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz", - "integrity": "sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw==", - "dev": true, + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz", + "integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.22.20", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz", - "integrity": 
"sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==", - "dev": true, + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", + "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", "engines": { "node": ">=6.9.0" } }, - "node_modules/@babel/highlight": { - "version": "7.22.20", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.22.20.tgz", - "integrity": "sha512-dkdMCN3py0+ksCgYmGG8jKeGA/8Tk+gJwSYYlFGxG5lmhfKNoAy004YpLxpS1W2J8m/EK2Ew+yOs9pVRwO89mg==", - "dev": true, - "dependencies": { - "@babel/helper-validator-identifier": "^7.22.20", - "chalk": "^2.4.2", - "js-tokens": "^4.0.0" - }, + "node_modules/@babel/helper-validator-option": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.25.9.tgz", + "integrity": "sha512-e/zv1co8pp55dNdEcCynfj9X7nyUKUXoUEwfXqaZt0omVOmDe9oOTdKStH4GmAw6zxMFs50ZayuMfHDKlO7Tfw==", "engines": { "node": ">=6.9.0" } }, - "node_modules/@babel/highlight/node_modules/ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dev": true, - "dependencies": { - "color-convert": "^1.9.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/highlight/node_modules/chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dev": true, + "node_modules/@babel/helpers": { + "version": "7.26.10", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.26.10.tgz", + "integrity": 
"sha512-UPYc3SauzZ3JGgj87GgZ89JVdC5dj0AoetR5Bw6wj4niittNyFh6+eOGonYvJ1ao6B8lEa3Q3klS7ADZ53bc5g==", + "license": "MIT", "dependencies": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" + "@babel/template": "^7.26.9", + "@babel/types": "^7.26.10" }, "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/highlight/node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dev": true, - "dependencies": { - "color-name": "1.1.3" - } - }, - "node_modules/@babel/highlight/node_modules/color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", - "dev": true - }, - "node_modules/@babel/highlight/node_modules/escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", - "dev": true, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/@babel/highlight/node_modules/has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", - "dev": true, - "engines": { - "node": ">=4" + "node": ">=6.9.0" } }, - "node_modules/@babel/highlight/node_modules/supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dev": true, + 
"node_modules/@babel/parser": { + "version": "7.26.10", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.26.10.tgz", + "integrity": "sha512-6aQR2zGE/QFi8JpDLjUZEPYOs7+mhKXm86VaKFiLP35JQwQb6bwUE+XbvkH0EptsYhbNBSUGaUBLKqxH1xSgsA==", + "license": "MIT", "dependencies": { - "has-flag": "^3.0.0" + "@babel/types": "^7.26.10" }, - "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/parser": { - "version": "7.23.6", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.23.6.tgz", - "integrity": "sha512-Z2uID7YJ7oNvAI20O9X0bblw7Qqs8Q2hFy0R9tAfnfLkp5MW0UH9eUvnDSnFwKZ0AvgS1ucqR4KzvVHgnke1VQ==", "bin": { "parser": "bin/babel-parser.js" }, @@ -358,45 +322,51 @@ } }, "node_modules/@babel/runtime": { - "version": "7.21.5", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.21.5.tgz", - "integrity": "sha512-8jI69toZqqcsnqGGqwGS4Qb1VwLOEp4hz+CXPywcvjs60u3B4Pom/U/7rm4W8tMOYEB+E9wgD0mW1l3r8qlI9Q==", + "version": "7.26.10", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.26.10.tgz", + "integrity": "sha512-2WJMeRQPHKSPemqk/awGrAiuFfzBmOIPXKizAsVhWH9YJqLZ0H+HS4c8loHGgW6utJ3E/ejXQUsiGaQy2NZ9Fw==", + "dev": true, + "license": "MIT", "dependencies": { - "regenerator-runtime": "^0.13.11" + "regenerator-runtime": "^0.14.0" }, "engines": { "node": ">=6.9.0" } }, + "node_modules/@babel/standalone": { + "version": "7.26.8", + "resolved": "https://registry.npmjs.org/@babel/standalone/-/standalone-7.26.8.tgz", + "integrity": "sha512-WS5Cw/8gWP9qBJ+qPUVr5Le4bCeXTMoVHF9TofgEqAUpEgvVzNXCPf97SNLuDpSRNHNWcH2lFixGUGjaM6VVCg==", + "engines": { + "node": ">=6.9.0" + } + }, "node_modules/@babel/template": { - "version": "7.22.15", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.22.15.tgz", - "integrity": "sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w==", - "dev": true, + "version": "7.26.9", + "resolved": 
"https://registry.npmjs.org/@babel/template/-/template-7.26.9.tgz", + "integrity": "sha512-qyRplbeIpNZhmzOysF/wFMuP9sctmh2cFzRAZOn1YapxBsE1i9bJIY586R/WBLfLcmcBlM8ROBiQURnnNy+zfA==", + "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.22.13", - "@babel/parser": "^7.22.15", - "@babel/types": "^7.22.15" + "@babel/code-frame": "^7.26.2", + "@babel/parser": "^7.26.9", + "@babel/types": "^7.26.9" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/traverse": { - "version": "7.23.2", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.23.2.tgz", - "integrity": "sha512-azpe59SQ48qG6nu2CzcMLbxUudtN+dOM9kDbUqGq3HXUJRlo7i8fvPoxQUzYgLZ4cMVmuZgm8vvBpNeRhd6XSw==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.22.13", - "@babel/generator": "^7.23.0", - "@babel/helper-environment-visitor": "^7.22.20", - "@babel/helper-function-name": "^7.23.0", - "@babel/helper-hoist-variables": "^7.22.5", - "@babel/helper-split-export-declaration": "^7.22.6", - "@babel/parser": "^7.23.0", - "@babel/types": "^7.23.0", - "debug": "^4.1.0", + "version": "7.26.8", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.26.8.tgz", + "integrity": "sha512-nic9tRkjYH0oB2dzr/JoGIm+4Q6SuYeLEiIiZDwBscRMYFJ+tMAz98fuel9ZnbXViA2I0HVSSRRK8DW5fjXStA==", + "dependencies": { + "@babel/code-frame": "^7.26.2", + "@babel/generator": "^7.26.8", + "@babel/parser": "^7.26.8", + "@babel/template": "^7.26.8", + "@babel/types": "^7.26.8", + "debug": "^4.3.1", "globals": "^11.1.0" }, "engines": { @@ -407,20 +377,18 @@ "version": "11.12.0", "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", - "dev": true, "engines": { "node": ">=4" } }, "node_modules/@babel/types": { - "version": "7.23.0", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.23.0.tgz", - "integrity": 
"sha512-0oIyUfKoI3mSqMvsxBdclDwxXKXAUA8v/apZbc+iSyARYou1o8ZGDxbUYyLFoW2arqS2jDGqJuZvv1d/io1axg==", - "dev": true, + "version": "7.26.10", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.26.10.tgz", + "integrity": "sha512-emqcG3vHrpxUKTrxcblR36dcrcoRDvKmnL/dCL6ZsHaShW80qxCAcNhzQZrpeM765VzEos+xOi4s+r4IXzTwdQ==", + "license": "MIT", "dependencies": { - "@babel/helper-string-parser": "^7.22.5", - "@babel/helper-validator-identifier": "^7.22.20", - "to-fast-properties": "^2.0.0" + "@babel/helper-string-parser": "^7.25.9", + "@babel/helper-validator-identifier": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -432,6 +400,116 @@ "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", "dev": true }, + "node_modules/@csstools/color-helpers": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@csstools/color-helpers/-/color-helpers-5.0.2.tgz", + "integrity": "sha512-JqWH1vsgdGcw2RR6VliXXdA0/59LttzlU8UlRT/iUUsEeWfYq8I+K0yhihEUTTHLRm1EXvpsCx3083EU15ecsA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@csstools/css-calc": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@csstools/css-calc/-/css-calc-2.1.2.tgz", + "integrity": "sha512-TklMyb3uBB28b5uQdxjReG4L80NxAqgrECqLZFQbyLekwwlcDDS8r3f07DKqeo8C4926Br0gf/ZDe17Zv4wIuw==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@csstools/css-parser-algorithms": "^3.0.4", + "@csstools/css-tokenizer": "^3.0.3" + } + }, + "node_modules/@csstools/css-color-parser": { + "version": "3.0.8", + "resolved": 
"https://registry.npmjs.org/@csstools/css-color-parser/-/css-color-parser-3.0.8.tgz", + "integrity": "sha512-pdwotQjCCnRPuNi06jFuP68cykU1f3ZWExLe/8MQ1LOs8Xq+fTkYgd+2V8mWUWMrOn9iS2HftPVaMZDaXzGbhQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "dependencies": { + "@csstools/color-helpers": "^5.0.2", + "@csstools/css-calc": "^2.1.2" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@csstools/css-parser-algorithms": "^3.0.4", + "@csstools/css-tokenizer": "^3.0.3" + } + }, + "node_modules/@csstools/css-parser-algorithms": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@csstools/css-parser-algorithms/-/css-parser-algorithms-3.0.4.tgz", + "integrity": "sha512-Up7rBoV77rv29d3uKHUIVubz1BTcgyUK72IvCQAbfbMv584xHcGKCKbWh7i8hPrRJ7qU4Y8IO3IY9m+iTB7P3A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@csstools/css-tokenizer": "^3.0.3" + } + }, + "node_modules/@csstools/css-tokenizer": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@csstools/css-tokenizer/-/css-tokenizer-3.0.3.tgz", + "integrity": "sha512-UJnjoFsmxfKUdNYdWgOB0mWUypuLvAfQPH1+pyvRJs6euowbFkFC6P13w1l8mJyi3vxYMxc9kld5jZEGRQs6bw==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "engines": { + "node": ">=18" + } + }, "node_modules/@esbuild-plugins/node-globals-polyfill": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/@esbuild-plugins/node-globals-polyfill/-/node-globals-polyfill-0.2.3.tgz", @@ -441,9 +519,9 @@ } }, 
"node_modules/@esbuild/aix-ppc64": { - "version": "0.19.11", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.19.11.tgz", - "integrity": "sha512-FnzU0LyE3ySQk7UntJO4+qIiQgI7KoODnZg5xzXIrFJlKd2P2gwHsHY4927xj9y5PJmJSzULiUCWmv7iWnNa7g==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.24.0.tgz", + "integrity": "sha512-WtKdFM7ls47zkKHFVzMz8opM7LkcsIp9amDUBIAWirg70RM71WRSjdILPsY5Uv1D42ZpUfaPILDlfactHgsRkw==", "cpu": [ "ppc64" ], @@ -451,14 +529,15 @@ "os": [ "aix" ], + "peer": true, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/android-arm": { - "version": "0.19.11", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.19.11.tgz", - "integrity": "sha512-5OVapq0ClabvKvQ58Bws8+wkLCV+Rxg7tUVbo9xu034Nm536QTII4YzhaFriQ7rMrorfnFKUsArD2lqKbFY4vw==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.24.0.tgz", + "integrity": "sha512-arAtTPo76fJ/ICkXWetLCc9EwEHKaeya4vMrReVlEIUCAUncH7M4bhMQ+M9Vf+FFOZJdTNMXNBrWwW+OXWpSew==", "cpu": [ "arm" ], @@ -466,14 +545,15 @@ "os": [ "android" ], + "peer": true, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/android-arm64": { - "version": "0.19.11", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.19.11.tgz", - "integrity": "sha512-aiu7K/5JnLj//KOnOfEZ0D90obUkRzDMyqd/wNAUQ34m4YUPVhRZpnqKV9uqDGxT7cToSDnIHsGooyIczu9T+Q==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.24.0.tgz", + "integrity": "sha512-Vsm497xFM7tTIPYK9bNTYJyF/lsP590Qc1WxJdlB6ljCbdZKU9SY8i7+Iin4kyhV/KV5J2rOKsBQbB77Ab7L/w==", "cpu": [ "arm64" ], @@ -481,14 +561,15 @@ "os": [ "android" ], + "peer": true, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/android-x64": { - "version": "0.19.11", - "resolved": 
"https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.19.11.tgz", - "integrity": "sha512-eccxjlfGw43WYoY9QgB82SgGgDbibcqyDTlk3l3C0jOVHKxrjdc9CTwDUQd0vkvYg5um0OH+GpxYvp39r+IPOg==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.24.0.tgz", + "integrity": "sha512-t8GrvnFkiIY7pa7mMgJd7p8p8qqYIz1NYiAoKc75Zyv73L3DZW++oYMSHPRarcotTKuSs6m3hTOa5CKHaS02TQ==", "cpu": [ "x64" ], @@ -496,14 +577,15 @@ "os": [ "android" ], + "peer": true, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/darwin-arm64": { - "version": "0.19.11", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.19.11.tgz", - "integrity": "sha512-ETp87DRWuSt9KdDVkqSoKoLFHYTrkyz2+65fj9nfXsaV3bMhTCjtQfw3y+um88vGRKRiF7erPrh/ZuIdLUIVxQ==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.24.0.tgz", + "integrity": "sha512-CKyDpRbK1hXwv79soeTJNHb5EiG6ct3efd/FTPdzOWdbZZfGhpbcqIpiD0+vwmpu0wTIL97ZRPZu8vUt46nBSw==", "cpu": [ "arm64" ], @@ -511,14 +593,15 @@ "os": [ "darwin" ], + "peer": true, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/darwin-x64": { - "version": "0.19.11", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.19.11.tgz", - "integrity": "sha512-fkFUiS6IUK9WYUO/+22omwetaSNl5/A8giXvQlcinLIjVkxwTLSktbF5f/kJMftM2MJp9+fXqZ5ezS7+SALp4g==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.24.0.tgz", + "integrity": "sha512-rgtz6flkVkh58od4PwTRqxbKH9cOjaXCMZgWD905JOzjFKW+7EiUObfd/Kav+A6Gyud6WZk9w+xu6QLytdi2OA==", "cpu": [ "x64" ], @@ -526,14 +609,15 @@ "os": [ "darwin" ], + "peer": true, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/freebsd-arm64": { - "version": "0.19.11", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.19.11.tgz", - "integrity": 
"sha512-lhoSp5K6bxKRNdXUtHoNc5HhbXVCS8V0iZmDvyWvYq9S5WSfTIHU2UGjcGt7UeS6iEYp9eeymIl5mJBn0yiuxA==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.24.0.tgz", + "integrity": "sha512-6Mtdq5nHggwfDNLAHkPlyLBpE5L6hwsuXZX8XNmHno9JuL2+bg2BX5tRkwjyfn6sKbxZTq68suOjgWqCicvPXA==", "cpu": [ "arm64" ], @@ -541,14 +625,15 @@ "os": [ "freebsd" ], + "peer": true, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/freebsd-x64": { - "version": "0.19.11", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.19.11.tgz", - "integrity": "sha512-JkUqn44AffGXitVI6/AbQdoYAq0TEullFdqcMY/PCUZ36xJ9ZJRtQabzMA+Vi7r78+25ZIBosLTOKnUXBSi1Kw==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.24.0.tgz", + "integrity": "sha512-D3H+xh3/zphoX8ck4S2RxKR6gHlHDXXzOf6f/9dbFt/NRBDIE33+cVa49Kil4WUjxMGW0ZIYBYtaGCa2+OsQwQ==", "cpu": [ "x64" ], @@ -556,14 +641,15 @@ "os": [ "freebsd" ], + "peer": true, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/linux-arm": { - "version": "0.19.11", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.19.11.tgz", - "integrity": "sha512-3CRkr9+vCV2XJbjwgzjPtO8T0SZUmRZla+UL1jw+XqHZPkPgZiyWvbDvl9rqAN8Zl7qJF0O/9ycMtjU67HN9/Q==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.24.0.tgz", + "integrity": "sha512-gJKIi2IjRo5G6Glxb8d3DzYXlxdEj2NlkixPsqePSZMhLudqPhtZ4BUrpIuTjJYXxvF9njql+vRjB2oaC9XpBw==", "cpu": [ "arm" ], @@ -571,14 +657,15 @@ "os": [ "linux" ], + "peer": true, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/linux-arm64": { - "version": "0.19.11", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.19.11.tgz", - "integrity": "sha512-LneLg3ypEeveBSMuoa0kwMpCGmpu8XQUh+mL8XXwoYZ6Be2qBnVtcDI5azSvh7vioMDhoJFZzp9GWp9IWpYoUg==", + "version": "0.24.0", + 
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.24.0.tgz", + "integrity": "sha512-TDijPXTOeE3eaMkRYpcy3LarIg13dS9wWHRdwYRnzlwlA370rNdZqbcp0WTyyV/k2zSxfko52+C7jU5F9Tfj1g==", "cpu": [ "arm64" ], @@ -586,14 +673,15 @@ "os": [ "linux" ], + "peer": true, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/linux-ia32": { - "version": "0.19.11", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.19.11.tgz", - "integrity": "sha512-caHy++CsD8Bgq2V5CodbJjFPEiDPq8JJmBdeyZ8GWVQMjRD0sU548nNdwPNvKjVpamYYVL40AORekgfIubwHoA==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.24.0.tgz", + "integrity": "sha512-K40ip1LAcA0byL05TbCQ4yJ4swvnbzHscRmUilrmP9Am7//0UjPreh4lpYzvThT2Quw66MhjG//20mrufm40mA==", "cpu": [ "ia32" ], @@ -601,14 +689,15 @@ "os": [ "linux" ], + "peer": true, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/linux-loong64": { - "version": "0.19.11", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.19.11.tgz", - "integrity": "sha512-ppZSSLVpPrwHccvC6nQVZaSHlFsvCQyjnvirnVjbKSHuE5N24Yl8F3UwYUUR1UEPaFObGD2tSvVKbvR+uT1Nrg==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.24.0.tgz", + "integrity": "sha512-0mswrYP/9ai+CU0BzBfPMZ8RVm3RGAN/lmOMgW4aFUSOQBjA31UP8Mr6DDhWSuMwj7jaWOT0p0WoZ6jeHhrD7g==", "cpu": [ "loong64" ], @@ -616,14 +705,15 @@ "os": [ "linux" ], + "peer": true, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/linux-mips64el": { - "version": "0.19.11", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.19.11.tgz", - "integrity": "sha512-B5x9j0OgjG+v1dF2DkH34lr+7Gmv0kzX6/V0afF41FkPMMqaQ77pH7CrhWeR22aEeHKaeZVtZ6yFwlxOKPVFyg==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.24.0.tgz", + "integrity": 
"sha512-hIKvXm0/3w/5+RDtCJeXqMZGkI2s4oMUGj3/jM0QzhgIASWrGO5/RlzAzm5nNh/awHE0A19h/CvHQe6FaBNrRA==", "cpu": [ "mips64el" ], @@ -631,14 +721,15 @@ "os": [ "linux" ], + "peer": true, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/linux-ppc64": { - "version": "0.19.11", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.19.11.tgz", - "integrity": "sha512-MHrZYLeCG8vXblMetWyttkdVRjQlQUb/oMgBNurVEnhj4YWOr4G5lmBfZjHYQHHN0g6yDmCAQRR8MUHldvvRDA==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.24.0.tgz", + "integrity": "sha512-HcZh5BNq0aC52UoocJxaKORfFODWXZxtBaaZNuN3PUX3MoDsChsZqopzi5UupRhPHSEHotoiptqikjN/B77mYQ==", "cpu": [ "ppc64" ], @@ -646,14 +737,15 @@ "os": [ "linux" ], + "peer": true, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/linux-riscv64": { - "version": "0.19.11", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.19.11.tgz", - "integrity": "sha512-f3DY++t94uVg141dozDu4CCUkYW+09rWtaWfnb3bqe4w5NqmZd6nPVBm+qbz7WaHZCoqXqHz5p6CM6qv3qnSSQ==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.24.0.tgz", + "integrity": "sha512-bEh7dMn/h3QxeR2KTy1DUszQjUrIHPZKyO6aN1X4BCnhfYhuQqedHaa5MxSQA/06j3GpiIlFGSsy1c7Gf9padw==", "cpu": [ "riscv64" ], @@ -661,14 +753,15 @@ "os": [ "linux" ], + "peer": true, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/linux-s390x": { - "version": "0.19.11", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.19.11.tgz", - "integrity": "sha512-A5xdUoyWJHMMlcSMcPGVLzYzpcY8QP1RtYzX5/bS4dvjBGVxdhuiYyFwp7z74ocV7WDc0n1harxmpq2ePOjI0Q==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.24.0.tgz", + "integrity": "sha512-ZcQ6+qRkw1UcZGPyrCiHHkmBaj9SiCD8Oqd556HldP+QlpUIe2Wgn3ehQGVoPOvZvtHm8HPx+bH20c9pvbkX3g==", "cpu": [ 
"s390x" ], @@ -676,14 +769,15 @@ "os": [ "linux" ], + "peer": true, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/linux-x64": { - "version": "0.19.11", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.19.11.tgz", - "integrity": "sha512-grbyMlVCvJSfxFQUndw5mCtWs5LO1gUlwP4CDi4iJBbVpZcqLVT29FxgGuBJGSzyOxotFG4LoO5X+M1350zmPA==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.24.0.tgz", + "integrity": "sha512-vbutsFqQ+foy3wSSbmjBXXIJ6PL3scghJoM8zCL142cGaZKAdCZHyf+Bpu/MmX9zT9Q0zFBVKb36Ma5Fzfa8xA==", "cpu": [ "x64" ], @@ -691,14 +785,15 @@ "os": [ "linux" ], + "peer": true, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/netbsd-x64": { - "version": "0.19.11", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.19.11.tgz", - "integrity": "sha512-13jvrQZJc3P230OhU8xgwUnDeuC/9egsjTkXN49b3GcS5BKvJqZn86aGM8W9pd14Kd+u7HuFBMVtrNGhh6fHEQ==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.24.0.tgz", + "integrity": "sha512-hjQ0R/ulkO8fCYFsG0FZoH+pWgTTDreqpqY7UnQntnaKv95uP5iW3+dChxnx7C3trQQU40S+OgWhUVwCjVFLvg==", "cpu": [ "x64" ], @@ -706,14 +801,31 @@ "os": [ "netbsd" ], + "peer": true, "engines": { - "node": ">=12" + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.24.0.tgz", + "integrity": "sha512-MD9uzzkPQbYehwcN583yx3Tu5M8EIoTD+tUgKF982WYL9Pf5rKy9ltgD0eUgs8pvKnmizxjXZyLt0z6DC3rRXg==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "openbsd" + ], + "peer": true, + "engines": { + "node": ">=18" } }, "node_modules/@esbuild/openbsd-x64": { - "version": "0.19.11", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.19.11.tgz", - "integrity": 
"sha512-ysyOGZuTp6SNKPE11INDUeFVVQFrhcNDVUgSQVDzqsqX38DjhPEPATpid04LCoUr2WXhQTEZ8ct/EgJCUDpyNw==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.24.0.tgz", + "integrity": "sha512-4ir0aY1NGUhIC1hdoCzr1+5b43mw99uNwVzhIq1OY3QcEwPDO3B7WNXBzaKY5Nsf1+N11i1eOfFcq+D/gOS15Q==", "cpu": [ "x64" ], @@ -721,14 +833,15 @@ "os": [ "openbsd" ], + "peer": true, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/sunos-x64": { - "version": "0.19.11", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.19.11.tgz", - "integrity": "sha512-Hf+Sad9nVwvtxy4DXCZQqLpgmRTQqyFyhT3bZ4F2XlJCjxGmRFF0Shwn9rzhOYRB61w9VMXUkxlBy56dk9JJiQ==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.24.0.tgz", + "integrity": "sha512-jVzdzsbM5xrotH+W5f1s+JtUy1UWgjU0Cf4wMvffTB8m6wP5/kx0KiaLHlbJO+dMgtxKV8RQ/JvtlFcdZ1zCPA==", "cpu": [ "x64" ], @@ -736,14 +849,15 @@ "os": [ "sunos" ], + "peer": true, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/win32-arm64": { - "version": "0.19.11", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.19.11.tgz", - "integrity": "sha512-0P58Sbi0LctOMOQbpEOvOL44Ne0sqbS0XWHMvvrg6NE5jQ1xguCSSw9jQeUk2lfrXYsKDdOe6K+oZiwKPilYPQ==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.24.0.tgz", + "integrity": "sha512-iKc8GAslzRpBytO2/aN3d2yb2z8XTVfNV0PjGlCxKo5SgWmNXx82I/Q3aG1tFfS+A2igVCY97TJ8tnYwpUWLCA==", "cpu": [ "arm64" ], @@ -751,14 +865,15 @@ "os": [ "win32" ], + "peer": true, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/win32-ia32": { - "version": "0.19.11", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.19.11.tgz", - "integrity": "sha512-6YOrWS+sDJDmshdBIQU+Uoyh7pQKrdykdefC1avn76ss5c+RN6gut3LZA4E2cH5xUEp5/cA0+YxRaVtRAb0xBg==", + "version": "0.24.0", + "resolved": 
"https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.24.0.tgz", + "integrity": "sha512-vQW36KZolfIudCcTnaTpmLQ24Ha1RjygBo39/aLkM2kmjkWmZGEJ5Gn9l5/7tzXA42QGIoWbICfg6KLLkIw6yw==", "cpu": [ "ia32" ], @@ -766,14 +881,15 @@ "os": [ "win32" ], + "peer": true, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/win32-x64": { - "version": "0.19.11", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.19.11.tgz", - "integrity": "sha512-vfkhltrjCAb603XaFhqhAF4LGDi2M4OrCRrFusyQ+iTLQ/o60QQXxc9cZC/FFpihBI9N1Grn6SMKVJ4KP7Fuiw==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.24.0.tgz", + "integrity": "sha512-7IAFPrjSQIJrGsK6flwg7NFmwBoSTyF3rl7If0hNUFQU4ilTsEPL6GuMuU9BfIWVVGuRnuIidkSMC+c0Otu8IA==", "cpu": [ "x64" ], @@ -781,41 +897,92 @@ "os": [ "win32" ], + "peer": true, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@eslint-community/eslint-utils": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz", - "integrity": "sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==", + "version": "4.9.1", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.1.tgz", + "integrity": "sha512-phrYmNiYppR7znFEdqgfWHXR6NCkZEK7hwWDHZUjit/2/U0r6XvkDl0SYnoM51Hq7FhCGdLDT6zxCCOY1hexsQ==", + "dev": true, + "license": "MIT", "dependencies": { - "eslint-visitor-keys": "^3.3.0" + "eslint-visitor-keys": "^3.4.3" }, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" }, + "funding": { + "url": "https://opencollective.com/eslint" + }, "peerDependencies": { "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" } }, "node_modules/@eslint-community/regexpp": { - "version": "4.5.1", - "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.5.1.tgz", - "integrity": 
"sha512-Z5ba73P98O1KUYCCJTUeVpja9RcGoMdncZ6T49FCUl2lN38JtCJ+3WgIDBv0AuY4WChU5PmtJmOCTlN6FZTFKQ==", + "version": "4.12.2", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.2.tgz", + "integrity": "sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==", + "dev": true, + "license": "MIT", "engines": { "node": "^12.0.0 || ^14.0.0 || >=16.0.0" } }, + "node_modules/@eslint/config-array": { + "version": "0.21.1", + "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.1.tgz", + "integrity": "sha512-aw1gNayWpdI/jSYVgzN5pL0cfzU02GT3NBpeT/DXbx1/1x7ZKxFPd9bwrzygx/qiwIQiJ1sw/zD8qY/kRvlGHA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/object-schema": "^2.1.7", + "debug": "^4.3.1", + "minimatch": "^3.1.2" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/config-helpers": { + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.4.2.tgz", + "integrity": "sha512-gBrxN88gOIf3R7ja5K9slwNayVcZgK6SOUORm2uBzTeIEfeVaIhOpCtTox3P6R7o2jLFwLFTLnC7kU/RGcYEgw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/core": "^0.17.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/core": { + "version": "0.17.0", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.17.0.tgz", + "integrity": "sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@types/json-schema": "^7.0.15" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, "node_modules/@eslint/eslintrc": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.0.3.tgz", - "integrity": 
"sha512-+5gy6OQfk+xx3q0d6jGZZC3f3KzAkXc/IanVxd1is/VIIziRqqt3ongQz0FiTUXqTk0c7aDB3OaFuKnuSoJicQ==", + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.1.tgz", + "integrity": "sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ==", + "dev": true, + "license": "MIT", "dependencies": { "ajv": "^6.12.4", "debug": "^4.3.2", - "espree": "^9.5.2", - "globals": "^13.19.0", + "espree": "^10.0.1", + "globals": "^14.0.0", "ignore": "^5.2.0", "import-fresh": "^3.2.1", "js-yaml": "^4.1.0", @@ -823,103 +990,240 @@ "strip-json-comments": "^3.1.1" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { "url": "https://opencollective.com/eslint" } }, + "node_modules/@eslint/eslintrc/node_modules/globals": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-14.0.0.tgz", + "integrity": "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/@eslint/js": { - "version": "8.41.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.41.0.tgz", - "integrity": "sha512-LxcyMGxwmTh2lY9FwHPGWOHmYFCZvbrFCBZL4FzSSsxsRPuhrYUg/49/0KDfW8tnIEaEHtfmn6+NPN+1DqaNmA==", + "version": "9.39.2", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.39.2.tgz", + "integrity": "sha512-q1mjIoW1VX4IvSocvM/vbTiveKC4k9eLrajNEuSsmjymSDEbpGddtpfOoN7YGAqBK3NG+uqo8ia4PDTt8buCYA==", + "dev": true, + "license": "MIT", "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" } }, - "node_modules/@fortawesome/fontawesome-common-types": { - "version": "6.5.1", - "resolved": 
"https://registry.npmjs.org/@fortawesome/fontawesome-common-types/-/fontawesome-common-types-6.5.1.tgz", - "integrity": "sha512-GkWzv+L6d2bI5f/Vk6ikJ9xtl7dfXtoRu3YGE6nq0p/FFqA1ebMOAWg3XgRyb0I6LYyYkiAo+3/KrwuBp8xG7A==", - "hasInstallScript": true, + "node_modules/@eslint/object-schema": { + "version": "2.1.7", + "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.7.tgz", + "integrity": "sha512-VtAOaymWVfZcmZbp6E2mympDIHvyjXs/12LqWYjVw6qjrfF+VK+fyG33kChz3nnK+SU5/NeHOqrTEHS8sXO3OA==", + "dev": true, + "license": "Apache-2.0", "engines": { - "node": ">=6" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/plugin-kit": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.4.1.tgz", + "integrity": "sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/core": "^0.17.0", + "levn": "^0.4.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@fontsource/anonymous-pro": { + "version": "5.2.8", + "resolved": "https://registry.npmjs.org/@fontsource/anonymous-pro/-/anonymous-pro-5.2.8.tgz", + "integrity": "sha512-nCK3XLqSyTjWw94vtgaI5n8j+jsxz7Eds5S38il0DSZ+lBqs/O08m41iXqmm+i/pXqQb/KBxA6hck3hbozZQiA==", + "funding": { + "url": "https://github.com/sponsors/ayuhito" + } + }, + "node_modules/@fontsource/fira-code": { + "version": "5.2.7", + "resolved": "https://registry.npmjs.org/@fontsource/fira-code/-/fira-code-5.2.7.tgz", + "integrity": "sha512-tnB9NNund9TwIym8/7DMJe573nlPEQb+fKUV5GL8TBYXjIhDvL0D7mgmNVNQUPhXp+R7RylQeiBdkA4EbOHPGQ==", + "funding": { + "url": "https://github.com/sponsors/ayuhito" + } + }, + "node_modules/@fontsource/inconsolata": { + "version": "5.2.8", + "resolved": "https://registry.npmjs.org/@fontsource/inconsolata/-/inconsolata-5.2.8.tgz", + "integrity": 
"sha512-lIZW+WOZYpUH91g9r6rYYhfTmptF3YPPM54ZOs8IYVeeL4SeiAu4tfj7mdr8llYEq31DLYgi6JtGIJa192gB0Q==", + "funding": { + "url": "https://github.com/sponsors/ayuhito" + } + }, + "node_modules/@fontsource/jetbrains-mono": { + "version": "5.2.8", + "resolved": "https://registry.npmjs.org/@fontsource/jetbrains-mono/-/jetbrains-mono-5.2.8.tgz", + "integrity": "sha512-6w8/SG4kqvIMu7xd7wt6x3idn1Qux3p9N62s6G3rfldOUYHpWcc2FKrqf+Vo44jRvqWj2oAtTHrZXEP23oSKwQ==", + "funding": { + "url": "https://github.com/sponsors/ayuhito" + } + }, + "node_modules/@fontsource/noto-mono": { + "version": "5.2.5", + "resolved": "https://registry.npmjs.org/@fontsource/noto-mono/-/noto-mono-5.2.5.tgz", + "integrity": "sha512-mWcCCJtB7CneVkqDz8WzzXXnAgFldJmkVL3AX815RjKaEP+dEGzxjVX89080wmXQ322Z6vcb+PI9qP9RbafKcg==", + "funding": { + "url": "https://github.com/sponsors/ayuhito" + } + }, + "node_modules/@fontsource/source-code-pro": { + "version": "5.2.7", + "resolved": "https://registry.npmjs.org/@fontsource/source-code-pro/-/source-code-pro-5.2.7.tgz", + "integrity": "sha512-7papq9TH94KT+S5VSY8cU7tFmwuGkIe3qxXRMscuAXH6AjMU+KJI75f28FzgBVDrlMfA0jjlTV4/x5+H5o/5EQ==", + "funding": { + "url": "https://github.com/sponsors/ayuhito" + } + }, + "node_modules/@fontsource/ubuntu-mono": { + "version": "5.2.8", + "resolved": "https://registry.npmjs.org/@fontsource/ubuntu-mono/-/ubuntu-mono-5.2.8.tgz", + "integrity": "sha512-N4nT8+GYWWcDODVSBLU4f3PycZQH9YIF6SypDI0rti3HrLqcXFS9u6wMvAd9vx1FmjAQkYv7Ld9CZ8XYTEOD0A==", + "funding": { + "url": "https://github.com/sponsors/ayuhito" } }, "node_modules/@fortawesome/fontawesome-free": { - "version": "6.5.1", - "resolved": "https://registry.npmjs.org/@fortawesome/fontawesome-free/-/fontawesome-free-6.5.1.tgz", - "integrity": "sha512-CNy5vSwN3fsUStPRLX7fUYojyuzoEMSXPl7zSLJ8TgtRfjv24LOnOWKT2zYwaHZCJGkdyRnTmstR0P+Ah503Gw==", - "hasInstallScript": true, + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/@fortawesome/fontawesome-free/-/fontawesome-free-7.1.0.tgz", + 
"integrity": "sha512-+WxNld5ZCJHvPQCr/GnzCTVREyStrAJjisUPtUxG5ngDA8TMlPnKp6dddlTpai4+1GNmltAeuk1hJEkBohwZYA==", + "license": "(CC-BY-4.0 AND OFL-1.1 AND MIT)", "engines": { "node": ">=6" } }, "node_modules/@fortawesome/fontawesome-svg-core": { - "version": "6.5.1", - "resolved": "https://registry.npmjs.org/@fortawesome/fontawesome-svg-core/-/fontawesome-svg-core-6.5.1.tgz", - "integrity": "sha512-MfRCYlQPXoLlpem+egxjfkEuP9UQswTrlCOsknus/NcMoblTH2g0jPrapbcIb04KGA7E2GZxbAccGZfWoYgsrQ==", - "hasInstallScript": true, + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/@fortawesome/fontawesome-svg-core/-/fontawesome-svg-core-7.1.0.tgz", + "integrity": "sha512-fNxRUk1KhjSbnbuBxlWSnBLKLBNun52ZBTcs22H/xEEzM6Ap81ZFTQ4bZBxVQGQgVY0xugKGoRcCbaKjLQ3XZA==", + "license": "MIT", "dependencies": { - "@fortawesome/fontawesome-common-types": "6.5.1" + "@fortawesome/fontawesome-common-types": "7.1.0" }, "engines": { "node": ">=6" } }, + "node_modules/@fortawesome/fontawesome-svg-core/node_modules/@fortawesome/fontawesome-common-types": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/@fortawesome/fontawesome-common-types/-/fontawesome-common-types-7.1.0.tgz", + "integrity": "sha512-l/BQM7fYntsCI//du+6sEnHOP6a74UixFyOYUyz2DLMXKx+6DEhfR3F2NYGE45XH1JJuIamacb4IZs9S0ZOWLA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/@fortawesome/free-brands-svg-icons": { - "version": "6.5.1", - "resolved": "https://registry.npmjs.org/@fortawesome/free-brands-svg-icons/-/free-brands-svg-icons-6.5.1.tgz", - "integrity": "sha512-093l7DAkx0aEtBq66Sf19MgoZewv1zeY9/4C7vSKPO4qMwEsW/2VYTUTpBtLwfb9T2R73tXaRDPmE4UqLCYHfg==", - "hasInstallScript": true, + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/@fortawesome/free-brands-svg-icons/-/free-brands-svg-icons-7.1.0.tgz", + "integrity": "sha512-9byUd9bgNfthsZAjBl6GxOu1VPHgBuRUP9juI7ZoM98h8xNPTCTagfwUFyYscdZq4Hr7gD1azMfM9s5tIWKZZA==", + "license": "(CC-BY-4.0 AND MIT)", "dependencies": { - 
"@fortawesome/fontawesome-common-types": "6.5.1" + "@fortawesome/fontawesome-common-types": "7.1.0" }, "engines": { "node": ">=6" } }, + "node_modules/@fortawesome/free-brands-svg-icons/node_modules/@fortawesome/fontawesome-common-types": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/@fortawesome/fontawesome-common-types/-/fontawesome-common-types-7.1.0.tgz", + "integrity": "sha512-l/BQM7fYntsCI//du+6sEnHOP6a74UixFyOYUyz2DLMXKx+6DEhfR3F2NYGE45XH1JJuIamacb4IZs9S0ZOWLA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/@fortawesome/free-regular-svg-icons": { - "version": "6.5.1", - "resolved": "https://registry.npmjs.org/@fortawesome/free-regular-svg-icons/-/free-regular-svg-icons-6.5.1.tgz", - "integrity": "sha512-m6ShXn+wvqEU69wSP84coxLbNl7sGVZb+Ca+XZq6k30SzuP3X4TfPqtycgUh9ASwlNh5OfQCd8pDIWxl+O+LlQ==", - "hasInstallScript": true, + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/@fortawesome/free-regular-svg-icons/-/free-regular-svg-icons-7.1.0.tgz", + "integrity": "sha512-0e2fdEyB4AR+e6kU4yxwA/MonnYcw/CsMEP9lH82ORFi9svA6/RhDyhxIv5mlJaldmaHLLYVTb+3iEr+PDSZuQ==", + "license": "(CC-BY-4.0 AND MIT)", "dependencies": { - "@fortawesome/fontawesome-common-types": "6.5.1" + "@fortawesome/fontawesome-common-types": "7.1.0" }, "engines": { "node": ">=6" } }, + "node_modules/@fortawesome/free-regular-svg-icons/node_modules/@fortawesome/fontawesome-common-types": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/@fortawesome/fontawesome-common-types/-/fontawesome-common-types-7.1.0.tgz", + "integrity": "sha512-l/BQM7fYntsCI//du+6sEnHOP6a74UixFyOYUyz2DLMXKx+6DEhfR3F2NYGE45XH1JJuIamacb4IZs9S0ZOWLA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/@fortawesome/free-solid-svg-icons": { - "version": "6.5.1", - "resolved": "https://registry.npmjs.org/@fortawesome/free-solid-svg-icons/-/free-solid-svg-icons-6.5.1.tgz", - "integrity": 
"sha512-S1PPfU3mIJa59biTtXJz1oI0+KAXW6bkAb31XKhxdxtuXDiUIFsih4JR1v5BbxY7hVHsD1RKq+jRkVRaf773NQ==", - "hasInstallScript": true, + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/@fortawesome/free-solid-svg-icons/-/free-solid-svg-icons-7.1.0.tgz", + "integrity": "sha512-Udu3K7SzAo9N013qt7qmm22/wo2hADdheXtBfxFTecp+ogsc0caQNRKEb7pkvvagUGOpG9wJC1ViH6WXs8oXIA==", + "license": "(CC-BY-4.0 AND MIT)", "dependencies": { - "@fortawesome/fontawesome-common-types": "6.5.1" + "@fortawesome/fontawesome-common-types": "7.1.0" }, "engines": { "node": ">=6" } }, - "node_modules/@humanwhocodes/config-array": { - "version": "0.11.8", - "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.8.tgz", - "integrity": "sha512-UybHIJzJnR5Qc/MsD9Kr+RpO2h+/P1GhOwdiLPXK5TWk5sgTdu88bTD9UP+CKbPPh5Rni1u0GjAdYQLemG8g+g==", + "node_modules/@fortawesome/free-solid-svg-icons/node_modules/@fortawesome/fontawesome-common-types": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/@fortawesome/fontawesome-common-types/-/fontawesome-common-types-7.1.0.tgz", + "integrity": "sha512-l/BQM7fYntsCI//du+6sEnHOP6a74UixFyOYUyz2DLMXKx+6DEhfR3F2NYGE45XH1JJuIamacb4IZs9S0ZOWLA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/@humanfs/core": { + "version": "0.19.1", + "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz", + "integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=18.18.0" + } + }, + "node_modules/@humanfs/node": { + "version": "0.16.7", + "resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.7.tgz", + "integrity": "sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ==", + "dev": true, + "license": "Apache-2.0", "dependencies": { - "@humanwhocodes/object-schema": "^1.2.1", - "debug": "^4.1.1", - "minimatch": "^3.0.5" 
+ "@humanfs/core": "^0.19.1", + "@humanwhocodes/retry": "^0.4.0" }, "engines": { - "node": ">=10.10.0" + "node": ">=18.18.0" } }, "node_modules/@humanwhocodes/module-importer": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, "engines": { "node": ">=12.22" }, @@ -928,10 +1232,109 @@ "url": "https://github.com/sponsors/nzakas" } }, - "node_modules/@humanwhocodes/object-schema": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", - "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==" + "node_modules/@humanwhocodes/retry": { + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.3.tgz", + "integrity": "sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=18.18" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@isaacs/cliui": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dev": true, + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@isaacs/cliui/node_modules/ansi-regex": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", + "integrity": 
"sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@isaacs/cliui/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/wrap-ansi": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + 
"engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } }, "node_modules/@istanbuljs/schema": { "version": "0.1.3", @@ -943,33 +1346,40 @@ } }, "node_modules/@jridgewell/gen-mapping": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz", - "integrity": "sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==", - "devOptional": true, + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz", + "integrity": "sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==", "dependencies": { - "@jridgewell/set-array": "^1.0.1", + "@jridgewell/set-array": "^1.2.1", "@jridgewell/sourcemap-codec": "^1.4.10", - "@jridgewell/trace-mapping": "^0.3.9" + "@jridgewell/trace-mapping": "^0.3.24" }, "engines": { "node": ">=6.0.0" } }, + "node_modules/@jridgewell/remapping": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", + "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, "node_modules/@jridgewell/resolve-uri": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz", "integrity": "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==", - "devOptional": true, "engines": { "node": ">=6.0.0" } }, "node_modules/@jridgewell/set-array": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz", - "integrity": "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==", - "devOptional": true, + "version": "1.2.1", 
+ "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", + "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", "engines": { "node": ">=6.0.0" } @@ -985,75 +1395,507 @@ } }, "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.4.15", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz", - "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==" + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", + "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==" }, "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.18", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.18.tgz", - "integrity": "sha512-w+niJYzMHdd7USdiH2U6869nqhD2nbfZXND5Yp93qIbEmnDNk7PD48o+YchRVpzMU7M6jVCbenTR7PA1FLQ9pA==", - "devOptional": true, + "version": "0.3.25", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", + "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@mdi/font": { + "version": "7.4.47", + "resolved": "https://registry.npmjs.org/@mdi/font/-/font-7.4.47.tgz", + "integrity": "sha512-43MtGpd585SNzHZPcYowu/84Vz2a2g31TvPMTm9uTiCSWzaheQySUcSyUH/46fPnuPQWof2yd0pGBtzee/IQWw==" + }, + "node_modules/@mdit-vue/plugin-component": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@mdit-vue/plugin-component/-/plugin-component-3.0.2.tgz", + "integrity": "sha512-Fu53MajrZMOAjOIPGMTdTXgHLgGU9KwTqKtYc6WNYtFZNKw04euSfJ/zFg8eBY/2MlciVngkF7Gyc2IL7e8Bsw==", + "license": "MIT", + "dependencies": { + 
"@types/markdown-it": "^14.1.2", + "markdown-it": "^14.1.0" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@mdit-vue/plugin-frontmatter": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@mdit-vue/plugin-frontmatter/-/plugin-frontmatter-3.0.2.tgz", + "integrity": "sha512-QKKgIva31YtqHgSAz7S7hRcL7cHXiqdog4wxTfxeQCHo+9IP4Oi5/r1Y5E93nTPccpadDWzAwr3A0F+kAEnsVQ==", + "license": "MIT", "dependencies": { - "@jridgewell/resolve-uri": "3.1.0", - "@jridgewell/sourcemap-codec": "1.4.14" + "@mdit-vue/types": "3.0.2", + "@types/markdown-it": "^14.1.2", + "gray-matter": "^4.0.3", + "markdown-it": "^14.1.0" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@mdit-vue/types": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@mdit-vue/types/-/types-3.0.2.tgz", + "integrity": "sha512-00aAZ0F0NLik6I6Yba2emGbHLxv+QYrPH00qQ5dFKXlAo1Ll2RHDXwY7nN2WAfrx2pP+WrvSRFTGFCNGdzBDHw==", + "license": "MIT", + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@mixmark-io/domino": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@mixmark-io/domino/-/domino-2.2.0.tgz", + "integrity": "sha512-Y28PR25bHXUg88kCV7nivXrP2Nj2RueZ3/l/jdx6J9f8J4nsEGcgX0Qe6lt7Pa+J79+kPiJU3LguR6O/6zrLOw==", + "license": "BSD-2-Clause" + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "engines": { + "node": ">= 8" + } + }, + 
"node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nuxt/kit": { + "version": "3.15.4", + "resolved": "https://registry.npmjs.org/@nuxt/kit/-/kit-3.15.4.tgz", + "integrity": "sha512-dr7I7eZOoRLl4uxdxeL2dQsH0OrbEiVPIyBHnBpA4co24CBnoJoF+JINuP9l3PAM3IhUzc5JIVq3/YY3lEc3Hw==", + "dependencies": { + "c12": "^2.0.1", + "consola": "^3.4.0", + "defu": "^6.1.4", + "destr": "^2.0.3", + "globby": "^14.0.2", + "ignore": "^7.0.3", + "jiti": "^2.4.2", + "klona": "^2.0.6", + "knitwork": "^1.2.0", + "mlly": "^1.7.4", + "ohash": "^1.1.4", + "pathe": "^2.0.2", + "pkg-types": "^1.3.1", + "scule": "^1.3.0", + "semver": "^7.6.3", + "std-env": "^3.8.0", + "ufo": "^1.5.4", + "unctx": "^2.4.1", + "unimport": "^4.0.0", + "untyped": "^1.5.2" + }, + "engines": { + "node": ">=18.12.0" + } + }, + "node_modules/@nuxt/kit/node_modules/globby": { + "version": "14.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-14.1.0.tgz", + "integrity": "sha512-0Ia46fDOaT7k4og1PDW4YbodWWr3scS2vAr2lTbsplOt2WkKp0vQbkI9wKis/T5LV/dqPjO3bpS/z6GTJB82LA==", + "dependencies": { + "@sindresorhus/merge-streams": "^2.1.0", + "fast-glob": "^3.3.3", + "ignore": "^7.0.3", + "path-type": "^6.0.0", + "slash": "^5.1.0", + "unicorn-magic": "^0.3.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@nuxt/kit/node_modules/ignore": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.3.tgz", + "integrity": "sha512-bAH5jbK/F3T3Jls4I0SO1hmPR0dKU0a7+SY6n1yzRtG54FLO8d6w/nxLFX2Nb7dBu6cCWXPaAME6cYqFUMmuCA==", + "engines": { + "node": ">= 4" + } + }, + 
"node_modules/@nuxt/kit/node_modules/path-type": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-6.0.0.tgz", + "integrity": "sha512-Vj7sf++t5pBD637NSfkxpHSMfWaeig5+DKWLhcqIYx6mWQz5hdJTGDVMQiJcw1ZYkhs7AazKDGpRVji1LJCZUQ==", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@nuxt/kit/node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==" + }, + "node_modules/@nuxt/kit/node_modules/slash": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-5.1.0.tgz", + "integrity": "sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==", + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@one-ini/wasm": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/@one-ini/wasm/-/wasm-0.1.1.tgz", + "integrity": "sha512-XuySG1E38YScSJoMlqovLru4KTUNSjgVTIjyh7qMX6aNN5HY5Ct5LhRJdxO79JtTzKfzV/bnWpz+zquYrISsvw==", + "dev": true + }, + "node_modules/@parcel/watcher": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/@parcel/watcher/-/watcher-2.4.1.tgz", + "integrity": "sha512-HNjmfLQEVRZmHRET336f20H/8kOozUGwk7yajvsonjNxbj2wBTK1WsQuHkD5yYh9RxFGL2EyDHryOihOwUoKDA==", + "optional": true, + "dependencies": { + "detect-libc": "^1.0.3", + "is-glob": "^4.0.3", + "micromatch": "^4.0.5", + "node-addon-api": "^7.0.0" + }, + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + }, + "optionalDependencies": { + "@parcel/watcher-android-arm64": "2.4.1", + "@parcel/watcher-darwin-arm64": "2.4.1", + "@parcel/watcher-darwin-x64": "2.4.1", + "@parcel/watcher-freebsd-x64": 
"2.4.1", + "@parcel/watcher-linux-arm-glibc": "2.4.1", + "@parcel/watcher-linux-arm64-glibc": "2.4.1", + "@parcel/watcher-linux-arm64-musl": "2.4.1", + "@parcel/watcher-linux-x64-glibc": "2.4.1", + "@parcel/watcher-linux-x64-musl": "2.4.1", + "@parcel/watcher-win32-arm64": "2.4.1", + "@parcel/watcher-win32-ia32": "2.4.1", + "@parcel/watcher-win32-x64": "2.4.1" + } + }, + "node_modules/@parcel/watcher-android-arm64": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/@parcel/watcher-android-arm64/-/watcher-android-arm64-2.4.1.tgz", + "integrity": "sha512-LOi/WTbbh3aTn2RYddrO8pnapixAziFl6SMxHM69r3tvdSm94JtCenaKgk1GRg5FJ5wpMCpHeW+7yqPlvZv7kg==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-darwin-arm64": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/@parcel/watcher-darwin-arm64/-/watcher-darwin-arm64-2.4.1.tgz", + "integrity": "sha512-ln41eihm5YXIY043vBrrHfn94SIBlqOWmoROhsMVTSXGh0QahKGy77tfEywQ7v3NywyxBBkGIfrWRHm0hsKtzA==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-darwin-x64": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/@parcel/watcher-darwin-x64/-/watcher-darwin-x64-2.4.1.tgz", + "integrity": "sha512-yrw81BRLjjtHyDu7J61oPuSoeYWR3lDElcPGJyOvIXmor6DEo7/G2u1o7I38cwlcoBHQFULqF6nesIX3tsEXMg==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-freebsd-x64": { + "version": "2.4.1", + "resolved": 
"https://registry.npmjs.org/@parcel/watcher-freebsd-x64/-/watcher-freebsd-x64-2.4.1.tgz", + "integrity": "sha512-TJa3Pex/gX3CWIx/Co8k+ykNdDCLx+TuZj3f3h7eOjgpdKM+Mnix37RYsYU4LHhiYJz3DK5nFCCra81p6g050w==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-linux-arm-glibc": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm-glibc/-/watcher-linux-arm-glibc-2.4.1.tgz", + "integrity": "sha512-4rVYDlsMEYfa537BRXxJ5UF4ddNwnr2/1O4MHM5PjI9cvV2qymvhwZSFgXqbS8YoTk5i/JR0L0JDs69BUn45YA==", + "cpu": [ + "arm" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-linux-arm64-glibc": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm64-glibc/-/watcher-linux-arm64-glibc-2.4.1.tgz", + "integrity": "sha512-BJ7mH985OADVLpbrzCLgrJ3TOpiZggE9FMblfO65PlOCdG++xJpKUJ0Aol74ZUIYfb8WsRlUdgrZxKkz3zXWYA==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-linux-arm64-musl": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm64-musl/-/watcher-linux-arm64-musl-2.4.1.tgz", + "integrity": "sha512-p4Xb7JGq3MLgAfYhslU2SjoV9G0kI0Xry0kuxeG/41UfpjHGOhv7UoUDAz/jb1u2elbhazy4rRBL8PegPJFBhA==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + 
"node_modules/@parcel/watcher-linux-x64-glibc": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-x64-glibc/-/watcher-linux-x64-glibc-2.4.1.tgz", + "integrity": "sha512-s9O3fByZ/2pyYDPoLM6zt92yu6P4E39a03zvO0qCHOTjxmt3GHRMLuRZEWhWLASTMSrrnVNWdVI/+pUElJBBBg==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-linux-x64-musl": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-x64-musl/-/watcher-linux-x64-musl-2.4.1.tgz", + "integrity": "sha512-L2nZTYR1myLNST0O632g0Dx9LyMNHrn6TOt76sYxWLdff3cB22/GZX2UPtJnaqQPdCRoszoY5rcOj4oMTtp5fQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-win32-arm64": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/@parcel/watcher-win32-arm64/-/watcher-win32-arm64-2.4.1.tgz", + "integrity": "sha512-Uq2BPp5GWhrq/lcuItCHoqxjULU1QYEcyjSO5jqqOK8RNFDBQnenMMx4gAl3v8GiWa59E9+uDM7yZ6LxwUIfRg==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-win32-ia32": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/@parcel/watcher-win32-ia32/-/watcher-win32-ia32-2.4.1.tgz", + "integrity": "sha512-maNRit5QQV2kgHFSYwftmPBxiuK5u4DXjbXx7q6eKjq5dsLXZ4FJiVvlcw35QXzk0KrUecJmuVFbj4uV9oYrcw==", + "cpu": [ + "ia32" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + 
}, + "node_modules/@parcel/watcher-win32-x64": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/@parcel/watcher-win32-x64/-/watcher-win32-x64-2.4.1.tgz", + "integrity": "sha512-+DvS92F9ezicfswqrvIRM2njcYJbd5mb9CUgtrHCHmvn7pPPa+nMDRu1o1bYYz/l5IB2NVGNJWiH7h1E58IF2A==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" } }, - "node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec": { - "version": "1.4.14", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz", - "integrity": "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==", - "devOptional": true - }, - "node_modules/@mdi/font": { - "version": "7.2.96", - "resolved": "https://registry.npmjs.org/@mdi/font/-/font-7.2.96.tgz", - "integrity": "sha512-e//lmkmpFUMZKhmCY9zdjRe4zNXfbOIJnn6xveHbaV2kSw5aJ5dLXUxcRt1Gxfi7ZYpFLUWlkG2MGSFAiqAu7w==" - }, - "node_modules/@nodelib/fs.scandir": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", - "dependencies": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" + "node_modules/@pinia/testing": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@pinia/testing/-/testing-1.0.3.tgz", + "integrity": "sha512-g+qR49GNdI1Z8rZxKrQC3GN+LfnGTNf5Kk8Nz5Cz6mIGva5WRS+ffPXQfzhA0nu6TveWzPNYTjGl4nJqd3Cu9Q==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/posva" }, - "engines": { - "node": ">= 8" + "peerDependencies": { + "pinia": ">=3.0.4" } }, - "node_modules/@nodelib/fs.stat": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", - 
"integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "node_modules/@pkgjs/parseargs": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", + "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "dev": true, + "optional": true, "engines": { - "node": ">= 8" + "node": ">=14" } }, - "node_modules/@nodelib/fs.walk": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", - "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", - "dependencies": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - }, + "node_modules/@pkgr/core": { + "version": "0.2.9", + "resolved": "https://registry.npmjs.org/@pkgr/core/-/core-0.2.9.tgz", + "integrity": "sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA==", + "dev": true, + "license": "MIT", "engines": { - "node": ">= 8" + "node": "^12.20.0 || ^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/pkgr" } }, - "node_modules/@one-ini/wasm": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@one-ini/wasm/-/wasm-0.1.1.tgz", - "integrity": "sha512-XuySG1E38YScSJoMlqovLru4KTUNSjgVTIjyh7qMX6aNN5HY5Ct5LhRJdxO79JtTzKfzV/bnWpz+zquYrISsvw==", - "dev": true - }, "node_modules/@polka/url": { - "version": "1.0.0-next.21", - "resolved": "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.21.tgz", - "integrity": "sha512-a5Sab1C4/icpTZVzZc5Ghpz88yQtGOyNqYXcZgOssB2uuAr+wF/MvN6bgtW32q7HHrvBki+BsZ0OuNv6EV3K9g==", + "version": "1.0.0-next.28", + "resolved": "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.28.tgz", + "integrity": "sha512-8LduaNlMZGwdZ6qWrKlfa+2M4gahzFkprZiAt2TF8uS0qQgBizKXpXURqvTJ4WtmupWxaLqjRb2UCTe72mu+Aw==", "dev": true }, + 
"node_modules/@productdevbook/chatwoot": { + "version": "1.7.0", + "resolved": "git+ssh://git@github.com/shellhub-io/chatwoot.git#66251d370d3040dd5ca306f94e0d04914f493ed7", + "license": "MIT", + "dependencies": { + "@nuxt/kit": "^3.15.4", + "defu": "^6.1.4" + }, + "funding": { + "url": "https://github.com/sponsors/productdevbook" + }, + "peerDependencies": { + "vue": ">=3.3.0" + } + }, "node_modules/@rollup/plugin-inject": { "version": "5.0.5", "resolved": "https://registry.npmjs.org/@rollup/plugin-inject/-/plugin-inject-5.0.5.tgz", @@ -1076,15 +1918,14 @@ } }, "node_modules/@rollup/plugin-node-resolve": { - "version": "15.2.3", - "resolved": "https://registry.npmjs.org/@rollup/plugin-node-resolve/-/plugin-node-resolve-15.2.3.tgz", - "integrity": "sha512-j/lym8nf5E21LwBT4Df1VD6hRO2L2iwUeUmP7litikRsVp1H6NWx20NEp0Y7su+7XGc476GnXXc4kFeZNGmaSQ==", + "version": "15.3.1", + "resolved": "https://registry.npmjs.org/@rollup/plugin-node-resolve/-/plugin-node-resolve-15.3.1.tgz", + "integrity": "sha512-tgg6b91pAybXHJQMAAwW9VuWBO6Thi+q7BCNARLwSqlmsHz0XYURtGvh/AuwSADXSI4h/2uHbs7s4FzlZDGSGA==", "dev": true, "dependencies": { "@rollup/pluginutils": "^5.0.1", "@types/resolve": "1.20.2", "deepmerge": "^4.2.2", - "is-builtin-module": "^3.2.1", "is-module": "^1.0.0", "resolve": "^1.22.1" }, @@ -1101,13 +1942,13 @@ } }, "node_modules/@rollup/pluginutils": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.1.0.tgz", - "integrity": "sha512-XTIWOPPcpvyKI6L1NHo0lFlCyznUEyPmPY1mc3KpPVDYulHSTvyeLNVW00QTLIAFNhR3kYnJTQHeGqU4M3n09g==", + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.1.3.tgz", + "integrity": "sha512-Pnsb6f32CD2W3uCaLZIzDmeFyQ2b8UWMFI7xtwUezpcGBDVDW6y9XgAWIlARiGAo6eNF5FK5aQTr0LFyNyqq5A==", "dependencies": { "@types/estree": "^1.0.0", "estree-walker": "^2.0.2", - "picomatch": "^2.3.1" + "picomatch": "^4.0.2" }, "engines": { "node": ">=14.0.0" @@ -1121,10 +1962,21 @@ } } }, + 
"node_modules/@rollup/pluginutils/node_modules/picomatch": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", + "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.9.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.9.5.tgz", - "integrity": "sha512-idWaG8xeSRCfRq9KpRysDHJ/rEHBEXcHuJ82XY0yYFIWnLMjZv9vF/7DOq8djQ2n3Lk6+3qfSH8AqlmHlmi1MA==", + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.28.1.tgz", + "integrity": "sha512-2aZp8AES04KI2dy3Ss6/MDjXbwBzj+i0GqKtWXgw2/Ma6E4jJvujryO6gJAghIRVz7Vwr9Gtl/8na3nDUKpraQ==", "cpu": [ "arm" ], @@ -1134,9 +1986,9 @@ ] }, "node_modules/@rollup/rollup-android-arm64": { - "version": "4.9.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.9.5.tgz", - "integrity": "sha512-f14d7uhAMtsCGjAYwZGv6TwuS3IFaM4ZnGMUn3aCBgkcHAYErhV1Ad97WzBvS2o0aaDv4mVz+syiN0ElMyfBPg==", + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.28.1.tgz", + "integrity": "sha512-EbkK285O+1YMrg57xVA+Dp0tDBRB93/BZKph9XhMjezf6F4TpYjaUSuPt5J0fZXlSag0LmZAsTmdGGqPp4pQFA==", "cpu": [ "arm64" ], @@ -1146,9 +1998,9 @@ ] }, "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.9.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.9.5.tgz", - "integrity": "sha512-ndoXeLx455FffL68OIUrVr89Xu1WLzAG4n65R8roDlCoYiQcGGg6MALvs2Ap9zs7AHg8mpHtMpwC8jBBjZrT/w==", + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.28.1.tgz", + "integrity": 
"sha512-prduvrMKU6NzMq6nxzQw445zXgaDBbMQvmKSJaxpaZ5R1QDM8w+eGxo6Y/jhT/cLoCvnZI42oEqf9KQNYz1fqQ==", "cpu": [ "arm64" ], @@ -1158,9 +2010,9 @@ ] }, "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.9.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.9.5.tgz", - "integrity": "sha512-UmElV1OY2m/1KEEqTlIjieKfVwRg0Zwg4PLgNf0s3glAHXBN99KLpw5A5lrSYCa1Kp63czTpVll2MAqbZYIHoA==", + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.28.1.tgz", + "integrity": "sha512-WsvbOunsUk0wccO/TV4o7IKgloJ942hVFK1CLatwv6TJspcCZb9umQkPdvB7FihmdxgaKR5JyxDjWpCOp4uZlQ==", "cpu": [ "x64" ], @@ -1169,10 +2021,46 @@ "darwin" ] }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.28.1.tgz", + "integrity": "sha512-HTDPdY1caUcU4qK23FeeGxCdJF64cKkqajU0iBnTVxS8F7H/7BewvYoG+va1KPSL63kQ1PGNyiwKOfReavzvNA==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.28.1.tgz", + "integrity": "sha512-m/uYasxkUevcFTeRSM9TeLyPe2QDuqtjkeoTpP9SW0XxUWfcYrGDMkO/m2tTw+4NMAF9P2fU3Mw4ahNvo7QmsQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "freebsd" + ] + }, "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.9.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.9.5.tgz", - "integrity": "sha512-Q0LcU61v92tQB6ae+udZvOyZ0wfpGojtAKrrpAaIqmJ7+psq4cMIhT/9lfV6UQIpeItnq/2QDROhNLo00lOD1g==", + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.28.1.tgz", + "integrity": 
"sha512-QAg11ZIt6mcmzpNE6JZBpKfJaKkqTm1A9+y9O+frdZJEuhQxiugM05gnCWiANHj4RmbgeVJpTdmKRmH/a+0QbA==", + "cpu": [ + "arm" + ], + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.28.1.tgz", + "integrity": "sha512-dRP9PEBfolq1dmMcFqbEPSd9VlRuVWEGSmbxVEfiq2cs2jlZAl0YNxFzAQS2OrQmsLBLAATDMb3Z6MFv5vOcXg==", "cpu": [ "arm" ], @@ -1182,9 +2070,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.9.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.9.5.tgz", - "integrity": "sha512-dkRscpM+RrR2Ee3eOQmRWFjmV/payHEOrjyq1VZegRUa5OrZJ2MAxBNs05bZuY0YCtpqETDy1Ix4i/hRqX98cA==", + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.28.1.tgz", + "integrity": "sha512-uGr8khxO+CKT4XU8ZUH1TTEUtlktK6Kgtv0+6bIFSeiSlnGJHG1tSFSjm41uQ9sAO/5ULx9mWOz70jYLyv1QkA==", "cpu": [ "arm64" ], @@ -1194,9 +2082,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.9.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.9.5.tgz", - "integrity": "sha512-QaKFVOzzST2xzY4MAmiDmURagWLFh+zZtttuEnuNn19AiZ0T3fhPyjPPGwLNdiDT82ZE91hnfJsUiDwF9DClIQ==", + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.28.1.tgz", + "integrity": "sha512-QF54q8MYGAqMLrX2t7tNpi01nvq5RI59UBNx+3+37zoKX5KViPo/gk2QLhsuqok05sSCRluj0D00LzCwBikb0A==", "cpu": [ "arm64" ], @@ -1205,10 +2093,34 @@ "linux" ] }, + "node_modules/@rollup/rollup-linux-loongarch64-gnu": { + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.28.1.tgz", + "integrity": 
"sha512-vPul4uodvWvLhRco2w0GcyZcdyBfpfDRgNKU+p35AWEbJ/HPs1tOUrkSueVbBS0RQHAf/A+nNtDpvw95PeVKOA==", + "cpu": [ + "loong64" + ], + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.28.1.tgz", + "integrity": "sha512-pTnTdBuC2+pt1Rmm2SV7JWRqzhYpEILML4PKODqLz+C7Ou2apEV52h19CR7es+u04KlqplggmN9sqZlekg3R1A==", + "cpu": [ + "ppc64" + ], + "optional": true, + "os": [ + "linux" + ] + }, "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.9.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.9.5.tgz", - "integrity": "sha512-HeGqmRJuyVg6/X6MpE2ur7GbymBPS8Np0S/vQFHDmocfORT+Zt76qu+69NUoxXzGqVP1pzaY6QIi0FJWLC3OPA==", + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.28.1.tgz", + "integrity": "sha512-vWXy1Nfg7TPBSuAncfInmAI/WZDd5vOklyLJDdIRKABcZWojNDY0NJwruY2AcnCLnRJKSaBgf/GiJfauu8cQZA==", "cpu": [ "riscv64" ], @@ -1217,10 +2129,22 @@ "linux" ] }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.28.1.tgz", + "integrity": "sha512-/yqC2Y53oZjb0yz8PVuGOQQNOTwxcizudunl/tFs1aLvObTclTwZ0JhXF2XcPT/zuaymemCDSuuUPXJJyqeDOg==", + "cpu": [ + "s390x" + ], + "optional": true, + "os": [ + "linux" + ] + }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.9.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.9.5.tgz", - "integrity": "sha512-Dq1bqBdLaZ1Gb/l2e5/+o3B18+8TI9ANlA1SkejZqDgdU/jK/ThYaMPMJpVMMXy2uRHvGKbkz9vheVGdq3cJfA==", + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.28.1.tgz", + "integrity": 
"sha512-fzgeABz7rrAlKYB0y2kSEiURrI0691CSL0+KXwKwhxvj92VULEDQLpBYLHpF49MSiPG4sq5CK3qHMnb9tlCjBw==", "cpu": [ "x64" ], @@ -1230,9 +2154,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.9.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.9.5.tgz", - "integrity": "sha512-ezyFUOwldYpj7AbkwyW9AJ203peub81CaAIVvckdkyH8EvhEIoKzaMFJj0G4qYJ5sw3BpqhFrsCc30t54HV8vg==", + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.28.1.tgz", + "integrity": "sha512-xQTDVzSGiMlSshpJCtudbWyRfLaNiVPXt1WgdWTwWz9n0U12cI2ZVtWe/Jgwyv/6wjL7b66uu61Vg0POWVfz4g==", "cpu": [ "x64" ], @@ -1242,9 +2166,9 @@ ] }, "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.9.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.9.5.tgz", - "integrity": "sha512-aHSsMnUw+0UETB0Hlv7B/ZHOGY5bQdwMKJSzGfDfvyhnpmVxLMGnQPGNE9wgqkLUs3+gbG1Qx02S2LLfJ5GaRQ==", + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.28.1.tgz", + "integrity": "sha512-wSXmDRVupJstFP7elGMgv+2HqXelQhuNf+IS4V+nUpNVi/GUiBgDmfwD0UGN3pcAnWsgKG3I52wMOBnk1VHr/A==", "cpu": [ "arm64" ], @@ -1254,9 +2178,9 @@ ] }, "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.9.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.9.5.tgz", - "integrity": "sha512-AiqiLkb9KSf7Lj/o1U3SEP9Zn+5NuVKgFdRIZkvd4N0+bYrTOovVd0+LmYCPQGbocT4kvFyK+LXCDiXPBF3fyA==", + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.28.1.tgz", + "integrity": "sha512-ZkyTJ/9vkgrE/Rk9vhMXhf8l9D+eAhbAVbsGsXKy2ohmJaWg0LPQLnIxRdRp/bKyr8tXuPlXhIoGlEB5XpJnGA==", "cpu": [ "ia32" ], @@ -1266,9 +2190,9 @@ ] }, "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.9.5", - "resolved": 
"https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.9.5.tgz", - "integrity": "sha512-1q+mykKE3Vot1kaFJIDoUFv5TuW+QQVaf2FmTT9krg86pQrGStOSJJ0Zil7CFagyxDuouTepzt5Y5TVzyajOdQ==", + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.28.1.tgz", + "integrity": "sha512-ZvK2jBafvttJjoIdKm/Q/Bh7IJ1Ose9IBOwpOXcOvW3ikGTQGmKDgxTC6oCAzW6PynbkKP8+um1du81XJHZ0JA==", "cpu": [ "x64" ], @@ -1277,237 +2201,365 @@ "win32" ] }, + "node_modules/@rtsao/scc": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@rtsao/scc/-/scc-1.1.0.tgz", + "integrity": "sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g==", + "dev": true, + "license": "MIT" + }, "node_modules/@rushstack/eslint-patch": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@rushstack/eslint-patch/-/eslint-patch-1.7.0.tgz", - "integrity": "sha512-Jh4t/593gxs0lJZ/z3NnasKlplXT2f+4y/LZYuaKZW5KAaiVFL/fThhs+17EbUd53jUVJ0QudYCBGbN/psvaqg==" + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/@rushstack/eslint-patch/-/eslint-patch-1.15.0.tgz", + "integrity": "sha512-ojSshQPKwVvSMR8yT2L/QtUkV5SXi/IfDiJ4/8d6UbTPjiHVmxZzUAzGD8Tzks1b9+qQkZa0isUOvYObedITaw==", + "license": "MIT" }, "node_modules/@sentry-internal/feedback": { - "version": "7.92.0", - "resolved": "https://registry.npmjs.org/@sentry-internal/feedback/-/feedback-7.92.0.tgz", - "integrity": "sha512-/jEALRtVqboxB9kcK2tag8QCO6XANTlGBb9RV3oeGXJe0DDNJXRq6wVZbfgztXJRrfgx4XVDcNt1pRVoGGG++g==", + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry-internal/feedback/-/feedback-7.120.4.tgz", + "integrity": "sha512-eSwgvTdrh03zYYaI6UVOjI9p4VmKg6+c2+CBQfRZX++6wwnCVsNv7XF7WUIpVGBAkJ0N2oapjQmCzJKGKBRWQg==", + "license": "MIT", "dependencies": { - "@sentry/core": "7.92.0", - "@sentry/types": "7.92.0", - "@sentry/utils": "7.92.0" + "@sentry/core": "7.120.4", + "@sentry/types": "7.120.4", + 
"@sentry/utils": "7.120.4" }, "engines": { "node": ">=12" } }, "node_modules/@sentry-internal/feedback/node_modules/@sentry/core": { - "version": "7.92.0", - "resolved": "https://registry.npmjs.org/@sentry/core/-/core-7.92.0.tgz", - "integrity": "sha512-1Tly7YB2I1byI5xb0Cwrxs56Rhww+6mQ7m9P7rTmdC3/ijOzbEoohtYIUPwcooCEarpbEJe/tAayRx6BrH2UbQ==", + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/core/-/core-7.120.4.tgz", + "integrity": "sha512-TXu3Q5kKiq8db9OXGkWyXUbIxMMuttB5vJ031yolOl5T/B69JRyAoKuojLBjRv1XX583gS1rSSoX8YXX7ATFGA==", + "license": "MIT", "dependencies": { - "@sentry/types": "7.92.0", - "@sentry/utils": "7.92.0" + "@sentry/types": "7.120.4", + "@sentry/utils": "7.120.4" }, "engines": { "node": ">=8" } }, "node_modules/@sentry-internal/feedback/node_modules/@sentry/types": { - "version": "7.92.0", - "resolved": "https://registry.npmjs.org/@sentry/types/-/types-7.92.0.tgz", - "integrity": "sha512-APmSOuZuoRGpbPpPeYIbMSplPjiWNLZRQa73QiXuTflW4Tu/ItDlU8hOa2+A6JKVkJCuD2EN6yUrxDGSMyNXeg==", + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/types/-/types-7.120.4.tgz", + "integrity": "sha512-cUq2hSSe6/qrU6oZsEP4InMI5VVdD86aypE+ENrQ6eZEVLTCYm1w6XhW1NvIu3UuWh7gZec4a9J7AFpYxki88Q==", + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/@sentry-internal/feedback/node_modules/@sentry/utils": { - "version": "7.92.0", - "resolved": "https://registry.npmjs.org/@sentry/utils/-/utils-7.92.0.tgz", - "integrity": "sha512-3nEfrQ1z28b/2zgFGANPh5yMVtgwXmrasZxTvKbrAj+KWJpjrJHrIR84r9W277J44NMeZ5RhRW2uoDmuBslPnA==", + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/utils/-/utils-7.120.4.tgz", + "integrity": "sha512-zCKpyDIWKHwtervNK2ZlaK8mMV7gVUijAgFeJStH+CU/imcdquizV3pFLlSQYRswG+Lbyd6CT/LGRh3IbtkCFw==", + "license": "MIT", + "dependencies": { + "@sentry/types": "7.120.4" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@sentry-internal/replay-canvas": { + "version": "7.120.4", + 
"resolved": "https://registry.npmjs.org/@sentry-internal/replay-canvas/-/replay-canvas-7.120.4.tgz", + "integrity": "sha512-2+W4CgUL1VzrPjArbTid4WhKh7HH21vREVilZdvffQPVwOEpgNTPAb69loQuTlhJVveh9hWTj2nE5UXLbLP+AA==", + "license": "MIT", + "dependencies": { + "@sentry/core": "7.120.4", + "@sentry/replay": "7.120.4", + "@sentry/types": "7.120.4", + "@sentry/utils": "7.120.4" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@sentry-internal/replay-canvas/node_modules/@sentry/core": { + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/core/-/core-7.120.4.tgz", + "integrity": "sha512-TXu3Q5kKiq8db9OXGkWyXUbIxMMuttB5vJ031yolOl5T/B69JRyAoKuojLBjRv1XX583gS1rSSoX8YXX7ATFGA==", + "license": "MIT", "dependencies": { - "@sentry/types": "7.92.0" + "@sentry/types": "7.120.4", + "@sentry/utils": "7.120.4" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@sentry-internal/replay-canvas/node_modules/@sentry/types": { + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/types/-/types-7.120.4.tgz", + "integrity": "sha512-cUq2hSSe6/qrU6oZsEP4InMI5VVdD86aypE+ENrQ6eZEVLTCYm1w6XhW1NvIu3UuWh7gZec4a9J7AFpYxki88Q==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@sentry-internal/replay-canvas/node_modules/@sentry/utils": { + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/utils/-/utils-7.120.4.tgz", + "integrity": "sha512-zCKpyDIWKHwtervNK2ZlaK8mMV7gVUijAgFeJStH+CU/imcdquizV3pFLlSQYRswG+Lbyd6CT/LGRh3IbtkCFw==", + "license": "MIT", + "dependencies": { + "@sentry/types": "7.120.4" }, "engines": { "node": ">=8" } }, "node_modules/@sentry/browser": { - "version": "7.92.0", - "resolved": "https://registry.npmjs.org/@sentry/browser/-/browser-7.92.0.tgz", - "integrity": "sha512-loMr02/zQ38u8aQhYLtIBg0i5n3ps2e3GUXrt3CdsJQdkRYfa62gcrE7SzvoEpMVHTk7VOI4fWGht8cWw/1k3A==", + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/browser/-/browser-7.120.4.tgz", + 
"integrity": "sha512-ymlNtIPG6HAKzM/JXpWVGCzCNufZNADfy+O/olZuVJW5Be1DtOFyRnBvz0LeKbmxJbXb2lX/XMhuen6PXPdoQw==", + "license": "MIT", "dependencies": { - "@sentry-internal/feedback": "7.92.0", - "@sentry-internal/tracing": "7.92.0", - "@sentry/core": "7.92.0", - "@sentry/replay": "7.92.0", - "@sentry/types": "7.92.0", - "@sentry/utils": "7.92.0" + "@sentry-internal/feedback": "7.120.4", + "@sentry-internal/replay-canvas": "7.120.4", + "@sentry-internal/tracing": "7.120.4", + "@sentry/core": "7.120.4", + "@sentry/integrations": "7.120.4", + "@sentry/replay": "7.120.4", + "@sentry/types": "7.120.4", + "@sentry/utils": "7.120.4" }, "engines": { "node": ">=8" } }, "node_modules/@sentry/browser/node_modules/@sentry-internal/tracing": { - "version": "7.92.0", - "resolved": "https://registry.npmjs.org/@sentry-internal/tracing/-/tracing-7.92.0.tgz", - "integrity": "sha512-ur55vPcUUUWFUX4eVLNP71ohswK7ZZpleNZw9Y1GfLqyI+0ILQUwjtzqItJrdClvVsdRZJMRmDV40Hp9Lbb9mA==", + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry-internal/tracing/-/tracing-7.120.4.tgz", + "integrity": "sha512-Fz5+4XCg3akeoFK+K7g+d7HqGMjmnLoY2eJlpONJmaeT9pXY7yfUyXKZMmMajdE2LxxKJgQ2YKvSCaGVamTjHw==", + "license": "MIT", "dependencies": { - "@sentry/core": "7.92.0", - "@sentry/types": "7.92.0", - "@sentry/utils": "7.92.0" + "@sentry/core": "7.120.4", + "@sentry/types": "7.120.4", + "@sentry/utils": "7.120.4" }, "engines": { "node": ">=8" } }, "node_modules/@sentry/browser/node_modules/@sentry/core": { - "version": "7.92.0", - "resolved": "https://registry.npmjs.org/@sentry/core/-/core-7.92.0.tgz", - "integrity": "sha512-1Tly7YB2I1byI5xb0Cwrxs56Rhww+6mQ7m9P7rTmdC3/ijOzbEoohtYIUPwcooCEarpbEJe/tAayRx6BrH2UbQ==", + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/core/-/core-7.120.4.tgz", + "integrity": "sha512-TXu3Q5kKiq8db9OXGkWyXUbIxMMuttB5vJ031yolOl5T/B69JRyAoKuojLBjRv1XX583gS1rSSoX8YXX7ATFGA==", + "license": "MIT", "dependencies": { - "@sentry/types": "7.92.0", - 
"@sentry/utils": "7.92.0" + "@sentry/types": "7.120.4", + "@sentry/utils": "7.120.4" }, "engines": { "node": ">=8" } }, "node_modules/@sentry/browser/node_modules/@sentry/types": { - "version": "7.92.0", - "resolved": "https://registry.npmjs.org/@sentry/types/-/types-7.92.0.tgz", - "integrity": "sha512-APmSOuZuoRGpbPpPeYIbMSplPjiWNLZRQa73QiXuTflW4Tu/ItDlU8hOa2+A6JKVkJCuD2EN6yUrxDGSMyNXeg==", + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/types/-/types-7.120.4.tgz", + "integrity": "sha512-cUq2hSSe6/qrU6oZsEP4InMI5VVdD86aypE+ENrQ6eZEVLTCYm1w6XhW1NvIu3UuWh7gZec4a9J7AFpYxki88Q==", + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/@sentry/browser/node_modules/@sentry/utils": { - "version": "7.92.0", - "resolved": "https://registry.npmjs.org/@sentry/utils/-/utils-7.92.0.tgz", - "integrity": "sha512-3nEfrQ1z28b/2zgFGANPh5yMVtgwXmrasZxTvKbrAj+KWJpjrJHrIR84r9W277J44NMeZ5RhRW2uoDmuBslPnA==", + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/utils/-/utils-7.120.4.tgz", + "integrity": "sha512-zCKpyDIWKHwtervNK2ZlaK8mMV7gVUijAgFeJStH+CU/imcdquizV3pFLlSQYRswG+Lbyd6CT/LGRh3IbtkCFw==", + "license": "MIT", + "dependencies": { + "@sentry/types": "7.120.4" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@sentry/integrations": { + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/integrations/-/integrations-7.120.4.tgz", + "integrity": "sha512-kkBTLk053XlhDCg7OkBQTIMF4puqFibeRO3E3YiVc4PGLnocXMaVpOSCkMqAc1k1kZ09UgGi8DxfQhnFEjUkpA==", + "license": "MIT", + "dependencies": { + "@sentry/core": "7.120.4", + "@sentry/types": "7.120.4", + "@sentry/utils": "7.120.4", + "localforage": "^1.8.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@sentry/integrations/node_modules/@sentry/core": { + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/core/-/core-7.120.4.tgz", + "integrity": 
"sha512-TXu3Q5kKiq8db9OXGkWyXUbIxMMuttB5vJ031yolOl5T/B69JRyAoKuojLBjRv1XX583gS1rSSoX8YXX7ATFGA==", + "license": "MIT", + "dependencies": { + "@sentry/types": "7.120.4", + "@sentry/utils": "7.120.4" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@sentry/integrations/node_modules/@sentry/types": { + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/types/-/types-7.120.4.tgz", + "integrity": "sha512-cUq2hSSe6/qrU6oZsEP4InMI5VVdD86aypE+ENrQ6eZEVLTCYm1w6XhW1NvIu3UuWh7gZec4a9J7AFpYxki88Q==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@sentry/integrations/node_modules/@sentry/utils": { + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/utils/-/utils-7.120.4.tgz", + "integrity": "sha512-zCKpyDIWKHwtervNK2ZlaK8mMV7gVUijAgFeJStH+CU/imcdquizV3pFLlSQYRswG+Lbyd6CT/LGRh3IbtkCFw==", + "license": "MIT", "dependencies": { - "@sentry/types": "7.92.0" + "@sentry/types": "7.120.4" }, "engines": { "node": ">=8" } }, "node_modules/@sentry/replay": { - "version": "7.92.0", - "resolved": "https://registry.npmjs.org/@sentry/replay/-/replay-7.92.0.tgz", - "integrity": "sha512-G1t9Uvc9cR8VpNkElwvHIMGzykjIKikb10n0tfVd3e+rBPMCCjCPWOduwG6jZYxcvCjTpqmJh6NSLXxL/Mt4JA==", + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/replay/-/replay-7.120.4.tgz", + "integrity": "sha512-FW8sPenNFfnO/K7sncsSTX4rIVak9j7VUiLIagJrcqZIC7d1dInFNjy8CdVJUlyz3Y3TOgIl3L3+ZpjfyMnaZg==", + "license": "MIT", "dependencies": { - "@sentry-internal/tracing": "7.92.0", - "@sentry/core": "7.92.0", - "@sentry/types": "7.92.0", - "@sentry/utils": "7.92.0" + "@sentry-internal/tracing": "7.120.4", + "@sentry/core": "7.120.4", + "@sentry/types": "7.120.4", + "@sentry/utils": "7.120.4" }, "engines": { "node": ">=12" } }, "node_modules/@sentry/replay/node_modules/@sentry-internal/tracing": { - "version": "7.92.0", - "resolved": "https://registry.npmjs.org/@sentry-internal/tracing/-/tracing-7.92.0.tgz", - "integrity": 
"sha512-ur55vPcUUUWFUX4eVLNP71ohswK7ZZpleNZw9Y1GfLqyI+0ILQUwjtzqItJrdClvVsdRZJMRmDV40Hp9Lbb9mA==", + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry-internal/tracing/-/tracing-7.120.4.tgz", + "integrity": "sha512-Fz5+4XCg3akeoFK+K7g+d7HqGMjmnLoY2eJlpONJmaeT9pXY7yfUyXKZMmMajdE2LxxKJgQ2YKvSCaGVamTjHw==", + "license": "MIT", "dependencies": { - "@sentry/core": "7.92.0", - "@sentry/types": "7.92.0", - "@sentry/utils": "7.92.0" + "@sentry/core": "7.120.4", + "@sentry/types": "7.120.4", + "@sentry/utils": "7.120.4" }, "engines": { "node": ">=8" } }, "node_modules/@sentry/replay/node_modules/@sentry/core": { - "version": "7.92.0", - "resolved": "https://registry.npmjs.org/@sentry/core/-/core-7.92.0.tgz", - "integrity": "sha512-1Tly7YB2I1byI5xb0Cwrxs56Rhww+6mQ7m9P7rTmdC3/ijOzbEoohtYIUPwcooCEarpbEJe/tAayRx6BrH2UbQ==", + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/core/-/core-7.120.4.tgz", + "integrity": "sha512-TXu3Q5kKiq8db9OXGkWyXUbIxMMuttB5vJ031yolOl5T/B69JRyAoKuojLBjRv1XX583gS1rSSoX8YXX7ATFGA==", + "license": "MIT", "dependencies": { - "@sentry/types": "7.92.0", - "@sentry/utils": "7.92.0" + "@sentry/types": "7.120.4", + "@sentry/utils": "7.120.4" }, "engines": { "node": ">=8" } }, "node_modules/@sentry/replay/node_modules/@sentry/types": { - "version": "7.92.0", - "resolved": "https://registry.npmjs.org/@sentry/types/-/types-7.92.0.tgz", - "integrity": "sha512-APmSOuZuoRGpbPpPeYIbMSplPjiWNLZRQa73QiXuTflW4Tu/ItDlU8hOa2+A6JKVkJCuD2EN6yUrxDGSMyNXeg==", + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/types/-/types-7.120.4.tgz", + "integrity": "sha512-cUq2hSSe6/qrU6oZsEP4InMI5VVdD86aypE+ENrQ6eZEVLTCYm1w6XhW1NvIu3UuWh7gZec4a9J7AFpYxki88Q==", + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/@sentry/replay/node_modules/@sentry/utils": { - "version": "7.92.0", - "resolved": "https://registry.npmjs.org/@sentry/utils/-/utils-7.92.0.tgz", - "integrity": 
"sha512-3nEfrQ1z28b/2zgFGANPh5yMVtgwXmrasZxTvKbrAj+KWJpjrJHrIR84r9W277J44NMeZ5RhRW2uoDmuBslPnA==", + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/utils/-/utils-7.120.4.tgz", + "integrity": "sha512-zCKpyDIWKHwtervNK2ZlaK8mMV7gVUijAgFeJStH+CU/imcdquizV3pFLlSQYRswG+Lbyd6CT/LGRh3IbtkCFw==", + "license": "MIT", "dependencies": { - "@sentry/types": "7.92.0" + "@sentry/types": "7.120.4" }, "engines": { "node": ">=8" } }, "node_modules/@sentry/tracing": { - "version": "7.92.0", - "resolved": "https://registry.npmjs.org/@sentry/tracing/-/tracing-7.92.0.tgz", - "integrity": "sha512-1+TFFPVEdax4dNi68gin6MENiyGe9mOuNXfjulrP5eCzUEByus5HAxeDI/LLQ1hArfn048AzwSwKUsS2fO5sbg==", + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/tracing/-/tracing-7.120.4.tgz", + "integrity": "sha512-cAtpLh23qW3hoqZJ6c36EvFki5NhFWUSK71ALHefqDXEocMlfDc9I+IGn3B/ola2D2TDEDamCy3x32vctKqOag==", + "license": "MIT", "dependencies": { - "@sentry-internal/tracing": "7.92.0" + "@sentry-internal/tracing": "7.120.4" }, "engines": { "node": ">=8" } }, "node_modules/@sentry/tracing/node_modules/@sentry-internal/tracing": { - "version": "7.92.0", - "resolved": "https://registry.npmjs.org/@sentry-internal/tracing/-/tracing-7.92.0.tgz", - "integrity": "sha512-ur55vPcUUUWFUX4eVLNP71ohswK7ZZpleNZw9Y1GfLqyI+0ILQUwjtzqItJrdClvVsdRZJMRmDV40Hp9Lbb9mA==", + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry-internal/tracing/-/tracing-7.120.4.tgz", + "integrity": "sha512-Fz5+4XCg3akeoFK+K7g+d7HqGMjmnLoY2eJlpONJmaeT9pXY7yfUyXKZMmMajdE2LxxKJgQ2YKvSCaGVamTjHw==", + "license": "MIT", "dependencies": { - "@sentry/core": "7.92.0", - "@sentry/types": "7.92.0", - "@sentry/utils": "7.92.0" + "@sentry/core": "7.120.4", + "@sentry/types": "7.120.4", + "@sentry/utils": "7.120.4" }, "engines": { "node": ">=8" } }, "node_modules/@sentry/tracing/node_modules/@sentry/core": { - "version": "7.92.0", - "resolved": 
"https://registry.npmjs.org/@sentry/core/-/core-7.92.0.tgz", - "integrity": "sha512-1Tly7YB2I1byI5xb0Cwrxs56Rhww+6mQ7m9P7rTmdC3/ijOzbEoohtYIUPwcooCEarpbEJe/tAayRx6BrH2UbQ==", + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/core/-/core-7.120.4.tgz", + "integrity": "sha512-TXu3Q5kKiq8db9OXGkWyXUbIxMMuttB5vJ031yolOl5T/B69JRyAoKuojLBjRv1XX583gS1rSSoX8YXX7ATFGA==", + "license": "MIT", "dependencies": { - "@sentry/types": "7.92.0", - "@sentry/utils": "7.92.0" + "@sentry/types": "7.120.4", + "@sentry/utils": "7.120.4" }, "engines": { "node": ">=8" } }, "node_modules/@sentry/tracing/node_modules/@sentry/types": { - "version": "7.92.0", - "resolved": "https://registry.npmjs.org/@sentry/types/-/types-7.92.0.tgz", - "integrity": "sha512-APmSOuZuoRGpbPpPeYIbMSplPjiWNLZRQa73QiXuTflW4Tu/ItDlU8hOa2+A6JKVkJCuD2EN6yUrxDGSMyNXeg==", + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/types/-/types-7.120.4.tgz", + "integrity": "sha512-cUq2hSSe6/qrU6oZsEP4InMI5VVdD86aypE+ENrQ6eZEVLTCYm1w6XhW1NvIu3UuWh7gZec4a9J7AFpYxki88Q==", + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/@sentry/tracing/node_modules/@sentry/utils": { - "version": "7.92.0", - "resolved": "https://registry.npmjs.org/@sentry/utils/-/utils-7.92.0.tgz", - "integrity": "sha512-3nEfrQ1z28b/2zgFGANPh5yMVtgwXmrasZxTvKbrAj+KWJpjrJHrIR84r9W277J44NMeZ5RhRW2uoDmuBslPnA==", + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/utils/-/utils-7.120.4.tgz", + "integrity": "sha512-zCKpyDIWKHwtervNK2ZlaK8mMV7gVUijAgFeJStH+CU/imcdquizV3pFLlSQYRswG+Lbyd6CT/LGRh3IbtkCFw==", + "license": "MIT", "dependencies": { - "@sentry/types": "7.92.0" + "@sentry/types": "7.120.4" }, "engines": { "node": ">=8" } }, "node_modules/@sentry/vue": { - "version": "7.92.0", - "resolved": "https://registry.npmjs.org/@sentry/vue/-/vue-7.92.0.tgz", - "integrity": "sha512-efQoix2Wlc8auZmYh5FZ48rR2BzATyPD1szXKgWjEUESaG+yeBRmEDyK8bG4q7A6m8xsWB84AIISyIeY+hTkLA==", + "version": 
"7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/vue/-/vue-7.120.4.tgz", + "integrity": "sha512-dPWNzS0kA8z9IDhOLuCpj6UC1Kw26WnTnjDy5m9qISm8fObalmpG6c+iL4Yj5qSBR0e6Jwak8V1gES2wTQGSLQ==", + "license": "MIT", "dependencies": { - "@sentry/browser": "7.92.0", - "@sentry/core": "7.92.0", - "@sentry/types": "7.92.0", - "@sentry/utils": "7.92.0" + "@sentry/browser": "7.120.4", + "@sentry/core": "7.120.4", + "@sentry/types": "7.120.4", + "@sentry/utils": "7.120.4" }, "engines": { "node": ">=8" @@ -1517,132 +2569,229 @@ } }, "node_modules/@sentry/vue/node_modules/@sentry/core": { - "version": "7.92.0", - "resolved": "https://registry.npmjs.org/@sentry/core/-/core-7.92.0.tgz", - "integrity": "sha512-1Tly7YB2I1byI5xb0Cwrxs56Rhww+6mQ7m9P7rTmdC3/ijOzbEoohtYIUPwcooCEarpbEJe/tAayRx6BrH2UbQ==", + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/core/-/core-7.120.4.tgz", + "integrity": "sha512-TXu3Q5kKiq8db9OXGkWyXUbIxMMuttB5vJ031yolOl5T/B69JRyAoKuojLBjRv1XX583gS1rSSoX8YXX7ATFGA==", + "license": "MIT", "dependencies": { - "@sentry/types": "7.92.0", - "@sentry/utils": "7.92.0" + "@sentry/types": "7.120.4", + "@sentry/utils": "7.120.4" }, "engines": { "node": ">=8" } }, "node_modules/@sentry/vue/node_modules/@sentry/types": { - "version": "7.92.0", - "resolved": "https://registry.npmjs.org/@sentry/types/-/types-7.92.0.tgz", - "integrity": "sha512-APmSOuZuoRGpbPpPeYIbMSplPjiWNLZRQa73QiXuTflW4Tu/ItDlU8hOa2+A6JKVkJCuD2EN6yUrxDGSMyNXeg==", + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/types/-/types-7.120.4.tgz", + "integrity": "sha512-cUq2hSSe6/qrU6oZsEP4InMI5VVdD86aypE+ENrQ6eZEVLTCYm1w6XhW1NvIu3UuWh7gZec4a9J7AFpYxki88Q==", + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/@sentry/vue/node_modules/@sentry/utils": { - "version": "7.92.0", - "resolved": "https://registry.npmjs.org/@sentry/utils/-/utils-7.92.0.tgz", - "integrity": 
"sha512-3nEfrQ1z28b/2zgFGANPh5yMVtgwXmrasZxTvKbrAj+KWJpjrJHrIR84r9W277J44NMeZ5RhRW2uoDmuBslPnA==", + "version": "7.120.4", + "resolved": "https://registry.npmjs.org/@sentry/utils/-/utils-7.120.4.tgz", + "integrity": "sha512-zCKpyDIWKHwtervNK2ZlaK8mMV7gVUijAgFeJStH+CU/imcdquizV3pFLlSQYRswG+Lbyd6CT/LGRh3IbtkCFw==", + "license": "MIT", "dependencies": { - "@sentry/types": "7.92.0" + "@sentry/types": "7.120.4" }, "engines": { "node": ">=8" } }, + "node_modules/@sindresorhus/merge-streams": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-2.3.0.tgz", + "integrity": "sha512-LtoMMhxAlorcGhmFYI+LhPgbPZCkgP6ra1YL604EeF6U98pLlQ3iWIGMdWSC+vWmPBWBNgmDBAhnAobLROJmwg==", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@solid-primitives/refs": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@solid-primitives/refs/-/refs-1.1.2.tgz", + "integrity": "sha512-K7tf2thy7L+YJjdqXspXOg5xvNEOH8tgEWsp0+1mQk3obHBRD6hEjYZk7p7FlJphSZImS35je3UfmWuD7MhDfg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@solid-primitives/utils": "^6.3.2" + }, + "peerDependencies": { + "solid-js": "^1.6.12" + } + }, + "node_modules/@solid-primitives/transition-group": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@solid-primitives/transition-group/-/transition-group-1.1.2.tgz", + "integrity": "sha512-gnHS0OmcdjeoHN9n7Khu8KNrOlRc8a2weETDt2YT6o1zeW/XtUC6Db3Q9pkMU/9cCKdEmN4b0a/41MKAHRhzWA==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "solid-js": "^1.6.12" + } + }, + "node_modules/@solid-primitives/utils": { + "version": "6.3.2", + "resolved": "https://registry.npmjs.org/@solid-primitives/utils/-/utils-6.3.2.tgz", + "integrity": "sha512-hZ/M/qr25QOCcwDPOHtGjxTD8w2mNyVAYvcfgwzBHq2RwNqHNdDNsMZYap20+ruRwW4A3Cdkczyoz0TSxLCAPQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "solid-js": "^1.6.12" + 
} + }, "node_modules/@stripe/stripe-js": { "version": "1.54.2", "resolved": "https://registry.npmjs.org/@stripe/stripe-js/-/stripe-js-1.54.2.tgz", "integrity": "sha512-R1PwtDvUfs99cAjfuQ/WpwJ3c92+DAMy9xGApjqlWQMj0FKQabUAys2swfTRNzuYAYJh7NqK2dzcYVNkKLEKUg==" }, - "node_modules/@tootallnate/once": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", - "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", + "node_modules/@stylistic/eslint-plugin": { + "version": "5.7.0", + "resolved": "https://registry.npmjs.org/@stylistic/eslint-plugin/-/eslint-plugin-5.7.0.tgz", + "integrity": "sha512-PsSugIf9ip1H/mWKj4bi/BlEoerxXAda9ByRFsYuwsmr6af9NxJL0AaiNXs8Le7R21QR5KMiD/KdxZZ71LjAxQ==", "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.9.1", + "@typescript-eslint/types": "^8.52.0", + "eslint-visitor-keys": "^5.0.0", + "espree": "^11.0.0", + "estraverse": "^5.3.0", + "picomatch": "^4.0.3" + }, "engines": { - "node": ">= 10" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "peerDependencies": { + "eslint": ">=9.0.0" } }, - "node_modules/@types/asn1": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/@types/asn1/-/asn1-0.2.0.tgz", - "integrity": "sha512-5TMxIpYbIA9c1J0hYQjQDX3wr+rTgQEAXaW2BI8ECM8FO53wSW4HFZplTalrKSHuZUc76NtXcePRhwuOHqGD5g==", + "node_modules/@stylistic/eslint-plugin/node_modules/eslint-visitor-keys": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-5.0.0.tgz", + "integrity": "sha512-A0XeIi7CXU7nPlfHS9loMYEKxUaONu/hTEzHTGba9Huu94Cq1hPivf+DE5erJozZOky0LfvXAyrV/tcswpLI0Q==", "dev": true, - "dependencies": { - "@types/node": "*" + "license": "Apache-2.0", + "engines": { + "node": "^20.19.0 || ^22.13.0 || >=24" + }, + "funding": { + "url": "https://opencollective.com/eslint" } }, - "node_modules/@types/chai": { - "version": "4.3.5", - "resolved": 
"https://registry.npmjs.org/@types/chai/-/chai-4.3.5.tgz", - "integrity": "sha512-mEo1sAde+UCE6b2hxn332f1g1E8WfYRu6p5SvTKr2ZKC1f7gFJXk4h5PyGP9Dt6gCaG8y8XhwnXWC6Iy2cmBng==", - "dev": true - }, - "node_modules/@types/chai-subset": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/@types/chai-subset/-/chai-subset-1.3.3.tgz", - "integrity": "sha512-frBecisrNGz+F4T6bcc+NLeolfiojh5FxW2klu669+8BARtyQv2C/GkNW6FUodVe4BroGMP/wER/YDGc7rEllw==", + "node_modules/@stylistic/eslint-plugin/node_modules/espree": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-11.0.0.tgz", + "integrity": "sha512-+gMeWRrIh/NsG+3NaLeWHuyeyk70p2tbvZIWBYcqQ4/7Xvars6GYTZNhF1sIeLcc6Wb11He5ffz3hsHyXFrw5A==", "dev": true, + "license": "BSD-2-Clause", "dependencies": { - "@types/chai": "*" + "acorn": "^8.15.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^5.0.0" + }, + "engines": { + "node": "^20.19.0 || ^22.13.0 || >=24" + }, + "funding": { + "url": "https://opencollective.com/eslint" } }, - "node_modules/@types/eslint": { - "version": "8.37.0", - "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.37.0.tgz", - "integrity": "sha512-Piet7dG2JBuDIfohBngQ3rCt7MgO9xCO4xIMKxBThCq5PNRB91IjlJ10eJVwfoNtvTErmxLzwBZ7rHZtbOMmFQ==", + "node_modules/@stylistic/eslint-plugin/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", "dev": true, - "peer": true, - "dependencies": { - "@types/estree": "*", - "@types/json-schema": "*" + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/@tinymce/tinymce-vue": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/@tinymce/tinymce-vue/-/tinymce-vue-6.3.0.tgz", + "integrity": 
"sha512-DSP8Jhd3XqCCliTnusfbmz3D8GqQ4iRzkc4aadYHDcJPVjkaqopJ61McOdH82CSy599vGLkPjGzqJYWJkRMiUA==", + "license": "MIT", + "peerDependencies": { + "tinymce": "^8.0.0 || ^7.0.0 || ^6.0.0 || ^5.5.1", + "vue": "^3.0.0" + }, + "peerDependenciesMeta": { + "tinymce": { + "optional": true + } } }, - "node_modules/@types/eslint-scope": { - "version": "3.7.4", - "resolved": "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.4.tgz", - "integrity": "sha512-9K4zoImiZc3HlIp6AVUDE4CWYx22a+lhSZMYNpbjW04+YF0KWj4pJXnEMjdnFTiQibFFmElcsasJXDbdI/EPhA==", + "node_modules/@types/asn1": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/@types/asn1/-/asn1-0.2.0.tgz", + "integrity": "sha512-5TMxIpYbIA9c1J0hYQjQDX3wr+rTgQEAXaW2BI8ECM8FO53wSW4HFZplTalrKSHuZUc76NtXcePRhwuOHqGD5g==", "dev": true, - "peer": true, "dependencies": { - "@types/eslint": "*", - "@types/estree": "*" + "@types/node": "*" } }, "node_modules/@types/estree": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz", - "integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==" + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz", + "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==" }, - "node_modules/@types/istanbul-lib-coverage": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz", - "integrity": "sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g==", - "dev": true + "node_modules/@types/gensync": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@types/gensync/-/gensync-1.0.4.tgz", + "integrity": "sha512-C3YYeRQWp2fmq9OryX+FoDy8nXS6scQ7dPptD8LnFDAUNcKWJjXQKDNJD3HVm+kOUsXhTOkpi69vI4EuAr95bA==" }, "node_modules/@types/json-schema": { - "version": "7.0.11", - 
"resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz", - "integrity": "sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==" + "version": "7.0.15", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", + "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", + "dev": true, + "license": "MIT" }, "node_modules/@types/json5": { "version": "0.0.29", "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", - "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==" + "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", + "dev": true, + "license": "MIT" }, "node_modules/@types/linkify-it": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/linkify-it/-/linkify-it-3.0.2.tgz", - "integrity": "sha512-HZQYqbiFVWufzCwexrvh694SOim8z2d+xJl5UNamcvQFejLY/2YUtzXHYi3cHdI7PMlS8ejH2slRAOJQ32aNbA==" + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@types/linkify-it/-/linkify-it-5.0.0.tgz", + "integrity": "sha512-sVDA58zAw4eWAffKOaQH5/5j3XeayukzDk+ewSsnv3p4yJEZHCCzMDiZM8e0OUrRvmpGZ85jf4yDHkHsgBNr9Q==", + "license": "MIT" + }, + "node_modules/@types/markdown-it": { + "version": "14.1.2", + "resolved": "https://registry.npmjs.org/@types/markdown-it/-/markdown-it-14.1.2.tgz", + "integrity": "sha512-promo4eFwuiW+TfGxhi+0x3czqTYJkG8qB17ZUJiVF10Xm7NLVRSLUsfRTU/6h1e24VvRnXCx+hG7li58lkzog==", + "license": "MIT", + "dependencies": { + "@types/linkify-it": "^5", + "@types/mdurl": "^2" + } }, "node_modules/@types/mdurl": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@types/mdurl/-/mdurl-1.0.2.tgz", - "integrity": "sha512-eC4U9MlIcu2q0KQmXszyn5Akca/0jrQmwDRgpAMJai7qBWq4amIQhZyNau4VYGtCeALvW1/NtjzJJ567aZxfKA==" + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/@types/mdurl/-/mdurl-2.0.0.tgz", + "integrity": "sha512-RGdgjQUZba5p6QEFAVx2OGb8rQDL/cPRG7GiedRzMcJ1tYnUANBncjbSB1NRGwbvjcPeikRABz2nshyPk1bhWg==", + "license": "MIT" }, "node_modules/@types/node": { - "version": "18.19.7", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.7.tgz", - "integrity": "sha512-IGRJfoNX10N/PfrReRZ1br/7SQ+2vF/tK3KXNwzXz82D32z5dMQEoOlFew18nLSN+vMNcLY4GrKfzwi/yWI8/w==", + "version": "18.19.130", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.130.tgz", + "integrity": "sha512-GRaXQx6jGfL8sKfaIDD6OupbIHBr9jv7Jnaml9tB7l4v068PAOXqfcujMMo5PhbIs6ggR1XODELqahT2R8v0fg==", "devOptional": true, + "license": "MIT", "dependencies": { "undici-types": "~5.26.4" } @@ -1662,11 +2811,6 @@ "integrity": "sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q==", "dev": true }, - "node_modules/@types/semver": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.0.tgz", - "integrity": "sha512-G8hZ6XJiHnuhQKR7ZmysCeJWE08o8T0AXtk5darsCaTVsYZhhgUrq53jizaR2FvsoeCwJhlmwTjkXBY5Pn/ZHw==" - }, "node_modules/@types/sshpk": { "version": "1.17.4", "resolved": "https://registry.npmjs.org/@types/sshpk/-/sshpk-1.17.4.tgz", @@ -1678,9 +2822,10 @@ } }, "node_modules/@types/web-bluetooth": { - "version": "0.0.14", - "resolved": "https://registry.npmjs.org/@types/web-bluetooth/-/web-bluetooth-0.0.14.tgz", - "integrity": "sha512-5d2RhCard1nQUC3aHcq/gHzWYO6K0WJmAbjO7mQJgCQKtZpgXxv1rOM6O/dBDhDYYVutk1sciOgNSe+5YyfM8A==" + "version": "0.0.21", + "resolved": "https://registry.npmjs.org/@types/web-bluetooth/-/web-bluetooth-0.0.21.tgz", + "integrity": "sha512-oIQLCGWtcFZy2JW77j9k8nHzAOpqMHLQejDA48XXMWH6tjCQHz5RCFz1bzsmROyL6PUm+LLnUiI4BCn221inxA==", + "license": "MIT" }, "node_modules/@types/webfontloader": { "version": "1.6.38", @@ -1688,113 +2833,160 @@ "integrity": 
"sha512-kUaF72Fv202suFx6yBrwXqeVRMx7hGtJTesyESZgn9sEPCUeDXm2p0SiyS1MTqW74nQP4p7JyrOCwZ7pNFns4w==", "dev": true }, - "node_modules/@typescript-eslint/eslint-plugin": { - "version": "5.62.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.62.0.tgz", - "integrity": "sha512-TiZzBSJja/LbhNPvk6yc0JrX9XqhQ0hdh6M2svYfsHGejaKFIAGd9MQ+ERIMzLGlN/kZoYIgdxFV0PuljTKXag==", - "dependencies": { - "@eslint-community/regexpp": "^4.4.0", - "@typescript-eslint/scope-manager": "5.62.0", - "@typescript-eslint/type-utils": "5.62.0", - "@typescript-eslint/utils": "5.62.0", - "debug": "^4.3.4", - "graphemer": "^1.4.0", - "ignore": "^5.2.0", - "natural-compare-lite": "^1.4.0", - "semver": "^7.3.7", - "tsutils": "^3.21.0" + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "8.54.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.54.0.tgz", + "integrity": "sha512-hAAP5io/7csFStuOmR782YmTthKBJ9ND3WVL60hcOjvtGFb+HJxH4O5huAcmcZ9v9G8P+JETiZ/G1B8MALnWZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/regexpp": "^4.12.2", + "@typescript-eslint/scope-manager": "8.54.0", + "@typescript-eslint/type-utils": "8.54.0", + "@typescript-eslint/utils": "8.54.0", + "@typescript-eslint/visitor-keys": "8.54.0", + "ignore": "^7.0.5", + "natural-compare": "^1.4.0", + "ts-api-utils": "^2.4.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^8.54.0", + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/ignore": { + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz", + "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==", + 
"dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/@typescript-eslint/parser": { + "version": "8.54.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.54.0.tgz", + "integrity": "sha512-BtE0k6cjwjLZoZixN0t5AKP0kSzlGu7FctRXYuPAm//aaiZhmfq1JwdYpYr1brzEspYyFeF+8XF5j2VK6oalrA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/scope-manager": "8.54.0", + "@typescript-eslint/types": "8.54.0", + "@typescript-eslint/typescript-estree": "8.54.0", + "@typescript-eslint/visitor-keys": "8.54.0", + "debug": "^4.4.3" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "@typescript-eslint/parser": "^5.0.0", - "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <6.0.0" } }, - "node_modules/@typescript-eslint/parser": { - "version": "5.62.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.62.0.tgz", - "integrity": "sha512-VlJEV0fOQ7BExOsHYAGrgbEiZoi8D+Bl2+f6V2RrXerRSylnp+ZBHmPvaIa8cz0Ajx7WO7Z5RqfgYg7ED1nRhA==", + "node_modules/@typescript-eslint/project-service": { + "version": "8.54.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.54.0.tgz", + "integrity": "sha512-YPf+rvJ1s7MyiWM4uTRhE4DvBXrEV+d8oC3P9Y2eT7S+HBS0clybdMIPnhiATi9vZOYDc7OQ1L/i6ga6NFYK/g==", + "dev": true, + "license": "MIT", "dependencies": { - "@typescript-eslint/scope-manager": "5.62.0", - "@typescript-eslint/types": "5.62.0", - "@typescript-eslint/typescript-estree": "5.62.0", - "debug": "^4.3.4" + "@typescript-eslint/tsconfig-utils": "^8.54.0", + "@typescript-eslint/types": "^8.54.0", + "debug": "^4.4.3" }, "engines": { - "node": "^12.22.0 || 
^14.17.0 || >=16.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } + "typescript": ">=4.8.4 <6.0.0" } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "5.62.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.62.0.tgz", - "integrity": "sha512-VXuvVvZeQCQb5Zgf4HAxc04q5j+WrNAtNh9OwCsCgpKqESMTu3tF/jhZ3xG6T4NZwWl65Bg8KuS2uEvhSfLl0w==", + "version": "8.54.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.54.0.tgz", + "integrity": "sha512-27rYVQku26j/PbHYcVfRPonmOlVI6gihHtXFbTdB5sb6qA0wdAQAbyXFVarQ5t4HRojIz64IV90YtsjQSSGlQg==", + "dev": true, + "license": "MIT", "dependencies": { - "@typescript-eslint/types": "5.62.0", - "@typescript-eslint/visitor-keys": "5.62.0" + "@typescript-eslint/types": "8.54.0", + "@typescript-eslint/visitor-keys": "8.54.0" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/tsconfig-utils": { + "version": "8.54.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.54.0.tgz", + "integrity": "sha512-dRgOyT2hPk/JwxNMZDsIXDgyl9axdJI3ogZ2XWhBPsnZUv+hPesa5iuhdYt2gzwA9t8RE5ytOJ6xB0moV0Ujvw==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <6.0.0" } }, "node_modules/@typescript-eslint/type-utils": { - "version": "5.62.0", - "resolved": 
"https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.62.0.tgz", - "integrity": "sha512-xsSQreu+VnfbqQpW5vnCJdq1Z3Q0U31qiWmRhr98ONQmcp/yhiPJFPq8MXiJVLiksmOKSjIldZzkebzHuCGzew==", + "version": "8.54.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.54.0.tgz", + "integrity": "sha512-hiLguxJWHjjwL6xMBwD903ciAwd7DmK30Y9Axs/etOkftC3ZNN9K44IuRD/EB08amu+Zw6W37x9RecLkOo3pMA==", + "dev": true, + "license": "MIT", "dependencies": { - "@typescript-eslint/typescript-estree": "5.62.0", - "@typescript-eslint/utils": "5.62.0", - "debug": "^4.3.4", - "tsutils": "^3.21.0" + "@typescript-eslint/types": "8.54.0", + "@typescript-eslint/typescript-estree": "8.54.0", + "@typescript-eslint/utils": "8.54.0", + "debug": "^4.4.3", + "ts-api-utils": "^2.4.0" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "eslint": "*" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <6.0.0" } }, "node_modules/@typescript-eslint/types": { - "version": "5.62.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.62.0.tgz", - "integrity": "sha512-87NVngcbVXUahrRTqIK27gD2t5Cu1yuCXxbLcFtCzZGlfyVWWh8mLHkoxzjsB6DDNnvdL+fW8MiwPEJyGJQDgQ==", + "version": "8.54.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.54.0.tgz", + "integrity": "sha512-PDUI9R1BVjqu7AUDsRBbKMtwmjWcn4J3le+5LpcFgWULN3LvHC5rkc9gCVxbrsrGmO1jfPybN5s6h4Jy+OnkAA==", + "dev": true, + "license": "MIT", "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { "type": "opencollective", @@ -1802,338 +2994,524 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "5.62.0", - "resolved": 
"https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.62.0.tgz", - "integrity": "sha512-CmcQ6uY7b9y694lKdRB8FEel7JbU/40iSAPomu++SjLMntB+2Leay2LO6i8VnJk58MtE9/nQSFIH6jpyRWyYzA==", + "version": "8.54.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.54.0.tgz", + "integrity": "sha512-BUwcskRaPvTk6fzVWgDPdUndLjB87KYDrN5EYGetnktoeAvPtO4ONHlAZDnj5VFnUANg0Sjm7j4usBlnoVMHwA==", + "dev": true, + "license": "MIT", "dependencies": { - "@typescript-eslint/types": "5.62.0", - "@typescript-eslint/visitor-keys": "5.62.0", - "debug": "^4.3.4", - "globby": "^11.1.0", - "is-glob": "^4.0.3", - "semver": "^7.3.7", - "tsutils": "^3.21.0" + "@typescript-eslint/project-service": "8.54.0", + "@typescript-eslint/tsconfig-utils": "8.54.0", + "@typescript-eslint/types": "8.54.0", + "@typescript-eslint/visitor-keys": "8.54.0", + "debug": "^4.4.3", + "minimatch": "^9.0.5", + "semver": "^7.7.3", + "tinyglobby": "^0.2.15", + "ts-api-utils": "^2.4.0" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } + "peerDependencies": { + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": 
"sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, "node_modules/@typescript-eslint/utils": { - "version": "5.62.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.62.0.tgz", - "integrity": "sha512-n8oxjeb5aIbPFEtmQxQYOLI0i9n5ySBEY/ZEHHZqKQSFnxio1rv6dthascc9dLuwrL0RC5mPCxB7vnAVGAYWAQ==", + "version": "8.54.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.54.0.tgz", + "integrity": "sha512-9Cnda8GS57AQakvRyG0PTejJNlA2xhvyNtEVIMlDWOOeEyBkYWhGPnfrIAnqxLMTSTo6q8g12XVjjev5l1NvMA==", + "dev": true, + "license": "MIT", "dependencies": { - "@eslint-community/eslint-utils": "^4.2.0", - "@types/json-schema": "^7.0.9", - "@types/semver": "^7.3.12", - "@typescript-eslint/scope-manager": "5.62.0", - "@typescript-eslint/types": "5.62.0", - "@typescript-eslint/typescript-estree": "5.62.0", - "eslint-scope": "^5.1.1", - "semver": "^7.3.7" + "@eslint-community/eslint-utils": "^4.9.1", + "@typescript-eslint/scope-manager": "8.54.0", + "@typescript-eslint/types": "8.54.0", + "@typescript-eslint/typescript-estree": "8.54.0" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <6.0.0" } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "5.62.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.62.0.tgz", - "integrity": "sha512-07ny+LHRzQXepkGg6w0mFY41fVUNBrL2Roj/++7V1txKugfjm/Ci/qSND03r2RhlJhJYMcTn9AhhSSqQp0Ysyw==", + "version": "8.54.0", + "resolved": 
"https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.54.0.tgz", + "integrity": "sha512-VFlhGSl4opC0bprJiItPQ1RfUhGDIBokcPwaFH4yiBCaNPeld/9VeXbiPO1cLyorQi1G1vL+ecBk1x8o1axORA==", + "dev": true, + "license": "MIT", "dependencies": { - "@typescript-eslint/types": "5.62.0", - "eslint-visitor-keys": "^3.3.0" + "@typescript-eslint/types": "8.54.0", + "eslint-visitor-keys": "^4.2.1" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" } }, + "node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, "node_modules/@vitejs/plugin-vue": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/@vitejs/plugin-vue/-/plugin-vue-5.0.3.tgz", - "integrity": "sha512-b8S5dVS40rgHdDrw+DQi/xOM9ed+kSRZzfm1T74bMmBDCd8XO87NKlFYInzCtwvtWwXZvo1QxE2OSspTATWrbA==", + "version": "5.2.4", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-vue/-/plugin-vue-5.2.4.tgz", + "integrity": "sha512-7Yx/SXSOcQq5HiiV3orevHUFn+pmMB4cgbEkDYgnkUWb0WfeQ/wa2yFv6D5ICiCQOVpjA7vYDXrC7AGO8yjDHA==", "dev": true, + "license": "MIT", "engines": { "node": "^18.0.0 || >=20.0.0" }, "peerDependencies": { - "vite": "^5.0.0", + "vite": "^5.0.0 || ^6.0.0", "vue": "^3.2.25" } }, - "node_modules/@vitest/coverage-c8": { - "version": "0.31.4", - "resolved": "https://registry.npmjs.org/@vitest/coverage-c8/-/coverage-c8-0.31.4.tgz", - "integrity": 
"sha512-VPx368m4DTcpA/P0v3YdVxl4QOSh1DbUcXURLRvDShrIB5KxOgfzw4Bn2R8AhAe/GyiWW/FIsJ/OJdYXCCiC1w==", + "node_modules/@vitest/coverage-v8": { + "version": "2.1.9", + "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-2.1.9.tgz", + "integrity": "sha512-Z2cOr0ksM00MpEfyVE8KXIYPEcBFxdbLSs56L8PO0QQMxt/6bDj45uQfxoc96v05KW3clk7vvgP0qfDit9DmfQ==", "dev": true, + "license": "MIT", "dependencies": { - "@ampproject/remapping": "^2.2.1", - "c8": "^7.13.0", - "magic-string": "^0.30.0", - "picocolors": "^1.0.0", - "std-env": "^3.3.2" + "@ampproject/remapping": "^2.3.0", + "@bcoe/v8-coverage": "^0.2.3", + "debug": "^4.3.7", + "istanbul-lib-coverage": "^3.2.2", + "istanbul-lib-report": "^3.0.1", + "istanbul-lib-source-maps": "^5.0.6", + "istanbul-reports": "^3.1.7", + "magic-string": "^0.30.12", + "magicast": "^0.3.5", + "std-env": "^3.8.0", + "test-exclude": "^7.0.1", + "tinyrainbow": "^1.2.0" }, "funding": { "url": "https://opencollective.com/vitest" }, "peerDependencies": { - "vitest": ">=0.30.0 <1" + "@vitest/browser": "2.1.9", + "vitest": "2.1.9" + }, + "peerDependenciesMeta": { + "@vitest/browser": { + "optional": true + } } }, - "node_modules/@vitest/expect": { - "version": "0.31.4", - "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-0.31.4.tgz", - "integrity": "sha512-tibyx8o7GUyGHZGyPgzwiaPaLDQ9MMuCOrc03BYT0nryUuhLbL7NV2r/q98iv5STlwMgaKuFJkgBW/8iPKwlSg==", + "node_modules/@vitest/coverage-v8/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@vitest/coverage-v8/node_modules/foreground-child": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.0.tgz", + "integrity": 
"sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==", "dev": true, "dependencies": { - "@vitest/spy": "0.31.4", - "@vitest/utils": "0.31.4", - "chai": "^4.3.7" + "cross-spawn": "^7.0.0", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" }, "funding": { - "url": "https://opencollective.com/vitest" + "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/@vitest/runner": { - "version": "0.31.4", - "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-0.31.4.tgz", - "integrity": "sha512-Wgm6UER+gwq6zkyrm5/wbpXGF+g+UBB78asJlFkIOwyse0pz8lZoiC6SW5i4gPnls/zUcPLWS7Zog0LVepXnpg==", + "node_modules/@vitest/coverage-v8/node_modules/glob": { + "version": "10.4.5", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", + "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", "dev": true, "dependencies": { - "@vitest/utils": "0.31.4", - "concordance": "^5.0.4", - "p-limit": "^4.0.0", - "pathe": "^1.1.0" + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.4", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^1.11.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" }, "funding": { - "url": "https://opencollective.com/vitest" + "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/@vitest/runner/node_modules/p-limit": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-4.0.0.tgz", - "integrity": "sha512-5b0R4txpzjPWVw/cXXUResoD4hb6U/x9BH08L7nw+GN1sezDzPdxeRvpc9c433fZhBan/wusjbCsqwqm4EIBIQ==", + "node_modules/@vitest/coverage-v8/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", "dev": true, "dependencies": { - "yocto-queue": "^1.0.0" + "brace-expansion": 
"^2.0.1" }, "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + "node": ">=16 || 14 >=14.17" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/@vitest/runner/node_modules/yocto-queue": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.0.0.tgz", - "integrity": "sha512-9bnSc/HEW2uRy67wc+T8UwauLuPJVn28jb+GtJY16iiKWyvmYJRXVT4UamsAEGQfPohgr2q4Tq0sQbQlxTfi1g==", + "node_modules/@vitest/coverage-v8/node_modules/minipass": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", "dev": true, "engines": { - "node": ">=12.20" + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/@vitest/coverage-v8/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "engines": { + "node": ">=14" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@vitest/coverage-v8/node_modules/test-exclude": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-7.0.1.tgz", + "integrity": "sha512-pFYqmTw68LXVjeWJMST4+borgQP2AyMNbg1BpZh9LbyhUeNkeaPF9gzfPGUAnSMV3qPYdWUwDIjjCLiSDOl7vg==", + "dev": true, + "dependencies": { + "@istanbuljs/schema": "^0.1.2", + "glob": "^10.4.1", + "minimatch": "^9.0.4" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@vitest/expect": { + "version": "2.1.9", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-2.1.9.tgz", + "integrity": "sha512-UJCIkTBenHeKT1TTlKMJWy1laZewsRIzYighyYiJKZreqtdxSos/S1t+ktRMQWu2CKqaarrkeszJx1cgC5tGZw==", + 
"dev": true, + "license": "MIT", + "dependencies": { + "@vitest/spy": "2.1.9", + "@vitest/utils": "2.1.9", + "chai": "^5.1.2", + "tinyrainbow": "^1.2.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/mocker": { + "version": "2.1.9", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-2.1.9.tgz", + "integrity": "sha512-tVL6uJgoUdi6icpxmdrn5YNo3g3Dxv+IHJBr0GXHaEdTcw3F+cPKnsXFhli6nO+f/6SDKPHEK1UN+k+TQv0Ehg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/spy": "2.1.9", + "estree-walker": "^3.0.3", + "magic-string": "^0.30.12" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "msw": "^2.4.9", + "vite": "^5.0.0" + }, + "peerDependenciesMeta": { + "msw": { + "optional": true + }, + "vite": { + "optional": true + } + } + }, + "node_modules/@vitest/mocker/node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0" + } + }, + "node_modules/@vitest/pretty-format": { + "version": "2.1.9", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-2.1.9.tgz", + "integrity": "sha512-KhRIdGV2U9HOUzxfiHmY8IFHTdqtOhIzCpd8WRdJiE7D/HUcZVD0EgQCVjm+Q9gkUXWgBvMmTtZgIG48wq7sOQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "tinyrainbow": "^1.2.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/runner": { + "version": "2.1.9", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-2.1.9.tgz", + "integrity": "sha512-ZXSSqTFIrzduD63btIfEyOmNcBmQvgOVsPNPe0jYtESiXkhd8u2erDLnMxmGrDCwHCCHE7hxwRDCT3pt0esT4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/utils": "2.1.9", + 
"pathe": "^1.1.2" + }, + "funding": { + "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/snapshot": { - "version": "0.31.4", - "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-0.31.4.tgz", - "integrity": "sha512-LemvNumL3NdWSmfVAMpXILGyaXPkZbG5tyl6+RQSdcHnTj6hvA49UAI8jzez9oQyE/FWLKRSNqTGzsHuk89LRA==", + "version": "2.1.9", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-2.1.9.tgz", + "integrity": "sha512-oBO82rEjsxLNJincVhLhaxxZdEtV0EFHMK5Kmx5sJ6H9L183dHECjiefOAdnqpIgT5eZwT04PoggUnW88vOBNQ==", "dev": true, + "license": "MIT", "dependencies": { - "magic-string": "^0.30.0", - "pathe": "^1.1.0", - "pretty-format": "^27.5.1" + "@vitest/pretty-format": "2.1.9", + "magic-string": "^0.30.12", + "pathe": "^1.1.2" }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/spy": { - "version": "0.31.4", - "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-0.31.4.tgz", - "integrity": "sha512-3ei5ZH1s3aqbEyftPAzSuunGICRuhE+IXOmpURFdkm5ybUADk+viyQfejNk6q8M5QGX8/EVKw+QWMEP3DTJDag==", + "version": "2.1.9", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-2.1.9.tgz", + "integrity": "sha512-E1B35FwzXXTs9FHNK6bDszs7mtydNi5MIfUWpceJ8Xbfb1gBMscAnwLbEu+B44ed6W3XjL9/ehLPHR1fkf1KLQ==", "dev": true, + "license": "MIT", "dependencies": { - "tinyspy": "^2.1.0" + "tinyspy": "^3.0.2" }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/ui": { - "version": "0.28.5", - "resolved": "https://registry.npmjs.org/@vitest/ui/-/ui-0.28.5.tgz", - "integrity": "sha512-hzzZzv38mH/LMFh54QEJpWFuGixZZBOD+C0fHU81d1lsvochPwNZhWJbuRJQNyZLSMZYCYW4hF6PpNQJXDHDmg==", + "version": "2.1.9", + "resolved": "https://registry.npmjs.org/@vitest/ui/-/ui-2.1.9.tgz", + "integrity": "sha512-izzd2zmnk8Nl5ECYkW27328RbQ1nKvkm6Bb5DAaz1Gk59EbLkiCMa6OLT0NoaAYTjOFS6N+SMYW1nh4/9ljPiw==", "dev": true, + "license": "MIT", "dependencies": { - "fast-glob": "^3.2.12", - "flatted": "^3.2.7", - 
"pathe": "^1.1.0", - "picocolors": "^1.0.0", - "sirv": "^2.0.2" + "@vitest/utils": "2.1.9", + "fflate": "^0.8.2", + "flatted": "^3.3.1", + "pathe": "^1.1.2", + "sirv": "^3.0.0", + "tinyglobby": "^0.2.10", + "tinyrainbow": "^1.2.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "vitest": "2.1.9" } }, "node_modules/@vitest/utils": { - "version": "0.31.4", - "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-0.31.4.tgz", - "integrity": "sha512-DobZbHacWznoGUfYU8XDPY78UubJxXfMNY1+SUdOp1NsI34eopSA6aZMeaGu10waSOeYwE8lxrd/pLfT0RMxjQ==", + "version": "2.1.9", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-2.1.9.tgz", + "integrity": "sha512-v0psaMSkNJ3A2NMrUEHFRzJtDPFn+/VWZ5WxImB21T9fjucJRmS7xCS3ppEnARb9y11OAzaD+P2Ps+b+BGX5iQ==", "dev": true, + "license": "MIT", "dependencies": { - "concordance": "^5.0.4", - "loupe": "^2.3.6", - "pretty-format": "^27.5.1" + "@vitest/pretty-format": "2.1.9", + "loupe": "^3.1.2", + "tinyrainbow": "^1.2.0" }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@volar/language-core": { - "version": "1.11.1", - "resolved": "https://registry.npmjs.org/@volar/language-core/-/language-core-1.11.1.tgz", - "integrity": "sha512-dOcNn3i9GgZAcJt43wuaEykSluAuOkQgzni1cuxLxTV0nJKanQztp7FxyswdRILaKH+P2XZMPRp2S4MV/pElCw==", + "version": "2.4.27", + "resolved": "https://registry.npmjs.org/@volar/language-core/-/language-core-2.4.27.tgz", + "integrity": "sha512-DjmjBWZ4tJKxfNC1F6HyYERNHPYS7L7OPFyCrestykNdUZMFYzI9WTyvwPcaNaHlrEUwESHYsfEw3isInncZxQ==", "dev": true, + "license": "MIT", "dependencies": { - "@volar/source-map": "1.11.1" + "@volar/source-map": "2.4.27" } }, "node_modules/@volar/source-map": { - "version": "1.11.1", - "resolved": "https://registry.npmjs.org/@volar/source-map/-/source-map-1.11.1.tgz", - "integrity": "sha512-hJnOnwZ4+WT5iupLRnuzbULZ42L7BWWPMmruzwtLhJfpDVoZLjNBxHDi2sY2bgZXCKlpU5XcsMFoYrsQmPhfZg==", + "version": "2.4.27", + "resolved": 
"https://registry.npmjs.org/@volar/source-map/-/source-map-2.4.27.tgz", + "integrity": "sha512-ynlcBReMgOZj2i6po+qVswtDUeeBRCTgDurjMGShbm8WYZgJ0PA4RmtebBJ0BCYol1qPv3GQF6jK7C9qoVc7lg==", "dev": true, - "dependencies": { - "muggle-string": "^0.3.1" - } + "license": "MIT" }, "node_modules/@volar/typescript": { - "version": "1.11.1", - "resolved": "https://registry.npmjs.org/@volar/typescript/-/typescript-1.11.1.tgz", - "integrity": "sha512-iU+t2mas/4lYierSnoFOeRFQUhAEMgsFuQxoxvwn5EdQopw43j+J27a4lt9LMInx1gLJBC6qL14WYGlgymaSMQ==", + "version": "2.4.27", + "resolved": "https://registry.npmjs.org/@volar/typescript/-/typescript-2.4.27.tgz", + "integrity": "sha512-eWaYCcl/uAPInSK2Lze6IqVWaBu/itVqR5InXcHXFyles4zO++Mglt3oxdgj75BDcv1Knr9Y93nowS8U3wqhxg==", "dev": true, + "license": "MIT", "dependencies": { - "@volar/language-core": "1.11.1", - "path-browserify": "^1.0.1" + "@volar/language-core": "2.4.27", + "path-browserify": "^1.0.1", + "vscode-uri": "^3.0.8" } }, "node_modules/@vue/compiler-core": { - "version": "3.4.5", - "resolved": "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.4.5.tgz", - "integrity": "sha512-Daka7P1z2AgKjzuueWXhwzIsKu0NkLB6vGbNVEV2iJ8GJTrzraZo/Sk4GWCMRtd/qVi3zwnk+Owbd/xSZbwHtQ==", + "version": "3.5.13", + "resolved": "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.5.13.tgz", + "integrity": "sha512-oOdAkwqUfW1WqpwSYJce06wvt6HljgY3fGeM9NcVA1HaYOij3mZG9Rkysn0OHuyUAGMbEbARIpsG+LPVlBJ5/Q==", "dependencies": { - "@babel/parser": "^7.23.6", - "@vue/shared": "3.4.5", + "@babel/parser": "^7.25.3", + "@vue/shared": "3.5.13", "entities": "^4.5.0", "estree-walker": "^2.0.2", - "source-map-js": "^1.0.2" - } - }, - "node_modules/@vue/compiler-core/node_modules/entities": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", - "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", - "engines": { - "node": ">=0.12" - }, - "funding": { 
- "url": "https://github.com/fb55/entities?sponsor=1" + "source-map-js": "^1.2.0" } }, "node_modules/@vue/compiler-dom": { - "version": "3.4.5", - "resolved": "https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.4.5.tgz", - "integrity": "sha512-J8YlxknJVd90SXFJ4HwGANSAXsx5I0lK30sO/zvYV7s5gXf7gZR7r/1BmZ2ju7RGH1lnc6bpBc6nL61yW+PsAQ==", + "version": "3.5.13", + "resolved": "https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.5.13.tgz", + "integrity": "sha512-ZOJ46sMOKUjO3e94wPdCzQ6P1Lx/vhp2RSvfaab88Ajexs0AHeV0uasYhi99WPaogmBlRHNRuly8xV75cNTMDA==", "dependencies": { - "@vue/compiler-core": "3.4.5", - "@vue/shared": "3.4.5" + "@vue/compiler-core": "3.5.13", + "@vue/shared": "3.5.13" } }, "node_modules/@vue/compiler-sfc": { - "version": "3.4.5", - "resolved": "https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.4.5.tgz", - "integrity": "sha512-jauvkDuSSUbP0ebhfNqljhShA90YEfX/0wZ+w40oZF43IjGyWYjqYaJbvMJwGOd+9+vODW6eSvnk28f0SGV7OQ==", - "dependencies": { - "@babel/parser": "^7.23.6", - "@vue/compiler-core": "3.4.5", - "@vue/compiler-dom": "3.4.5", - "@vue/compiler-ssr": "3.4.5", - "@vue/shared": "3.4.5", + "version": "3.5.13", + "resolved": "https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.5.13.tgz", + "integrity": "sha512-6VdaljMpD82w6c2749Zhf5T9u5uLBWKnVue6XWxprDobftnletJ8+oel7sexFfM3qIxNmVE7LSFGTpv6obNyaQ==", + "dependencies": { + "@babel/parser": "^7.25.3", + "@vue/compiler-core": "3.5.13", + "@vue/compiler-dom": "3.5.13", + "@vue/compiler-ssr": "3.5.13", + "@vue/shared": "3.5.13", "estree-walker": "^2.0.2", - "magic-string": "^0.30.5", - "postcss": "^8.4.32", - "source-map-js": "^1.0.2" + "magic-string": "^0.30.11", + "postcss": "^8.4.48", + "source-map-js": "^1.2.0" } }, "node_modules/@vue/compiler-ssr": { - "version": "3.4.5", - "resolved": "https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.4.5.tgz", - "integrity": "sha512-DDdEcDzj2lWTMfUMMtEpLDhURai9LhM0zSZ219jCt7b2Vyl0/jy3keFgCPMitG0V1S1YG4Cmws3lWHWdxHQOpg==", 
+ "version": "3.5.13", + "resolved": "https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.5.13.tgz", + "integrity": "sha512-wMH6vrYHxQl/IybKJagqbquvxpWCuVYpoUJfCqFZwa/JY1GdATAQ+TgVtgrwwMZ0D07QhA99rs/EAAWfvG6KpA==", "dependencies": { - "@vue/compiler-dom": "3.4.5", - "@vue/shared": "3.4.5" + "@vue/compiler-dom": "3.5.13", + "@vue/shared": "3.5.13" } }, "node_modules/@vue/devtools-api": { - "version": "6.5.1", - "resolved": "https://registry.npmjs.org/@vue/devtools-api/-/devtools-api-6.5.1.tgz", - "integrity": "sha512-+KpckaAQyfbvshdDW5xQylLni1asvNSGme1JFs8I1+/H5pHEhqUKMEQD/qn3Nx5+/nycBq11qAEi8lk+LXI2dA==" + "version": "6.6.4", + "resolved": "https://registry.npmjs.org/@vue/devtools-api/-/devtools-api-6.6.4.tgz", + "integrity": "sha512-sGhTPMuXqZ1rVOk32RylztWkfXTRhuS7vgAKv0zjqk8gbsHkJ7xfFf+jbySxt7tWObEJwyKaHMikV/WGDiQm8g==" }, - "node_modules/@vue/eslint-config-airbnb": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/@vue/eslint-config-airbnb/-/eslint-config-airbnb-7.0.1.tgz", - "integrity": "sha512-8d5QJeaL3PxKXfCVNGSA7RSKk+GNQLkOAMgN33ST+wJaXSdUI/4dJ0+LfqoLOu92NqwmKiIEZWMyPiGywhS1BA==", - "dependencies": { - "eslint-config-airbnb-base": "^15.0.0", - "eslint-import-resolver-custom-alias": "^1.3.0", - "eslint-import-resolver-node": "^0.3.6", - "eslint-plugin-import": "^2.26.0", - "eslint-plugin-jsx-a11y": "^6.6.1", - "eslint-plugin-react": "^7.30.1", - "eslint-plugin-vuejs-accessibility": "^1.2.0", - "vue-eslint-parser": "^9.0.3" - }, - "peerDependencies": { - "eslint": "^8.2.0", - "eslint-plugin-vue": "^9.2.0" + "node_modules/@vue/devtools-kit": { + "version": "7.7.8", + "resolved": "https://registry.npmjs.org/@vue/devtools-kit/-/devtools-kit-7.7.8.tgz", + "integrity": "sha512-4Y8op+AoxOJhB9fpcEF6d5vcJXWKgHxC3B0ytUB8zz15KbP9g9WgVzral05xluxi2fOeAy6t140rdQ943GcLRQ==", + "license": "MIT", + "dependencies": { + "@vue/devtools-shared": "^7.7.8", + "birpc": "^2.3.0", + "hookable": "^5.5.3", + "mitt": "^3.0.1", + "perfect-debounce": "^1.0.0", + 
"speakingurl": "^14.0.1", + "superjson": "^2.2.2" + } + }, + "node_modules/@vue/devtools-shared": { + "version": "7.7.8", + "resolved": "https://registry.npmjs.org/@vue/devtools-shared/-/devtools-shared-7.7.8.tgz", + "integrity": "sha512-XHpO3jC5nOgYr40M9p8Z4mmKfTvUxKyRcUnpBAYg11pE78eaRFBKb0kG5yKLroMuJeeNH9LWmKp2zMU5LUc7CA==", + "license": "MIT", + "dependencies": { + "rfdc": "^1.4.1" } }, "node_modules/@vue/eslint-config-typescript": { - "version": "11.0.3", - "resolved": "https://registry.npmjs.org/@vue/eslint-config-typescript/-/eslint-config-typescript-11.0.3.tgz", - "integrity": "sha512-dkt6W0PX6H/4Xuxg/BlFj5xHvksjpSlVjtkQCpaYJBIEuKj2hOVU7r+TIe+ysCwRYFz/lGqvklntRkCAibsbPw==", + "version": "14.6.0", + "resolved": "https://registry.npmjs.org/@vue/eslint-config-typescript/-/eslint-config-typescript-14.6.0.tgz", + "integrity": "sha512-UpiRY/7go4Yps4mYCjkvlIbVWmn9YvPGQDxTAlcKLphyaD77LjIu3plH4Y9zNT0GB4f3K5tMmhhtRhPOgrQ/bQ==", + "dev": true, + "license": "MIT", "dependencies": { - "@typescript-eslint/eslint-plugin": "^5.59.1", - "@typescript-eslint/parser": "^5.59.1", - "vue-eslint-parser": "^9.1.1" + "@typescript-eslint/utils": "^8.35.1", + "fast-glob": "^3.3.3", + "typescript-eslint": "^8.35.1", + "vue-eslint-parser": "^10.2.0" }, "engines": { - "node": "^14.17.0 || >=16.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "peerDependencies": { - "eslint": "^6.2.0 || ^7.0.0 || ^8.0.0", - "eslint-plugin-vue": "^9.0.0", - "typescript": "*" + "eslint": "^9.10.0", + "eslint-plugin-vue": "^9.28.0 || ^10.0.0", + "typescript": ">=4.8.4" }, "peerDependenciesMeta": { "typescript": { @@ -2142,231 +3520,173 @@ } }, "node_modules/@vue/language-core": { - "version": "1.8.27", - "resolved": "https://registry.npmjs.org/@vue/language-core/-/language-core-1.8.27.tgz", - "integrity": "sha512-L8Kc27VdQserNaCUNiSFdDl9LWT24ly8Hpwf1ECy3aFb9m6bDhBGQYOujDm21N7EW3moKIOKEanQwe1q5BK+mA==", - "dev": true, - "dependencies": { - "@volar/language-core": "~1.11.1", - "@volar/source-map": 
"~1.11.1", - "@vue/compiler-dom": "^3.3.0", - "@vue/shared": "^3.3.0", - "computeds": "^0.0.1", - "minimatch": "^9.0.3", - "muggle-string": "^0.3.1", - "path-browserify": "^1.0.1", - "vue-template-compiler": "^2.7.14" - }, - "peerDependencies": { - "typescript": "*" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@vue/language-core/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vue/language-core/-/language-core-3.2.4.tgz", + "integrity": "sha512-bqBGuSG4KZM45KKTXzGtoCl9cWju5jsaBKaJJe3h5hRAAWpZUuj5G+L+eI01sPIkm4H6setKRlw7E85wLdDNew==", "dev": true, + "license": "MIT", "dependencies": { - "balanced-match": "^1.0.0" + "@volar/language-core": "2.4.27", + "@vue/compiler-dom": "^3.5.0", + "@vue/shared": "^3.5.0", + "alien-signals": "^3.0.0", + "muggle-string": "^0.4.1", + "path-browserify": "^1.0.1", + "picomatch": "^4.0.2" } }, - "node_modules/@vue/language-core/node_modules/minimatch": { - "version": "9.0.3", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", - "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", + "node_modules/@vue/language-core/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", "dev": true, - "dependencies": { - "brace-expansion": "^2.0.1" - }, + "license": "MIT", "engines": { - "node": ">=16 || 14 >=14.17" + "node": ">=12" }, "funding": { - "url": "https://github.com/sponsors/isaacs" + "url": "https://github.com/sponsors/jonschlinkert" } }, 
"node_modules/@vue/reactivity": { - "version": "3.4.13", - "resolved": "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.4.13.tgz", - "integrity": "sha512-/ZdUOrGKkGVONzVJkfDqNcn2fLMvaa5VlYx2KwTbnRbX06YZ4GJE0PVTmWzIxtBYdpSTLLXgw3pDggO+96KXzg==", + "version": "3.5.27", + "resolved": "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.5.27.tgz", + "integrity": "sha512-vvorxn2KXfJ0nBEnj4GYshSgsyMNFnIQah/wczXlsNXt+ijhugmW+PpJ2cNPe4V6jpnBcs0MhCODKllWG+nvoQ==", + "license": "MIT", "dependencies": { - "@vue/shared": "3.4.13" + "@vue/shared": "3.5.27" } }, "node_modules/@vue/reactivity/node_modules/@vue/shared": { - "version": "3.4.13", - "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.4.13.tgz", - "integrity": "sha512-56crFKLPpzk85WXX1L1c0QzPOuoapWlPVys8eMG8kkRmqdMjWUqK8KpFdE2d7BQA4CEbXwyyHPq6MpFr8H9rcg==" + "version": "3.5.27", + "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.5.27.tgz", + "integrity": "sha512-dXr/3CgqXsJkZ0n9F3I4elY8wM9jMJpP3pvRG52r6m0tu/MsAFIe6JpXVGeNMd/D9F4hQynWT8Rfuj0bdm9kFQ==", + "license": "MIT" }, "node_modules/@vue/runtime-core": { - "version": "3.4.13", - "resolved": "https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.4.13.tgz", - "integrity": "sha512-Ov4d4At7z3goxqzSqQxdfVYEcN5HY4dM1uDYL6Hu/Es9Za9BEN602zyjWhhi2+BEki5F9NizRSvn02k/tqNWlg==", + "version": "3.5.27", + "resolved": "https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.5.27.tgz", + "integrity": "sha512-fxVuX/fzgzeMPn/CLQecWeDIFNt3gQVhxM0rW02Tvp/YmZfXQgcTXlakq7IMutuZ/+Ogbn+K0oct9J3JZfyk3A==", + "license": "MIT", "dependencies": { - "@vue/reactivity": "3.4.13", - "@vue/shared": "3.4.13" + "@vue/reactivity": "3.5.27", + "@vue/shared": "3.5.27" } }, "node_modules/@vue/runtime-core/node_modules/@vue/shared": { - "version": "3.4.13", - "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.4.13.tgz", - "integrity": "sha512-56crFKLPpzk85WXX1L1c0QzPOuoapWlPVys8eMG8kkRmqdMjWUqK8KpFdE2d7BQA4CEbXwyyHPq6MpFr8H9rcg==" + 
"version": "3.5.27", + "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.5.27.tgz", + "integrity": "sha512-dXr/3CgqXsJkZ0n9F3I4elY8wM9jMJpP3pvRG52r6m0tu/MsAFIe6JpXVGeNMd/D9F4hQynWT8Rfuj0bdm9kFQ==", + "license": "MIT" }, "node_modules/@vue/runtime-dom": { - "version": "3.4.13", - "resolved": "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.4.13.tgz", - "integrity": "sha512-ynde9p16eEV3u1VCxUre2e0nKzD0l3NzH0r599+bXeLT1Yhac8Atcot3iL9XNqwolxYCI89KBII+2MSVzfrz6w==", + "version": "3.5.27", + "resolved": "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.5.27.tgz", + "integrity": "sha512-/QnLslQgYqSJ5aUmb5F0z0caZPGHRB8LEAQ1s81vHFM5CBfnun63rxhvE/scVb/j3TbBuoZwkJyiLCkBluMpeg==", + "license": "MIT", "dependencies": { - "@vue/runtime-core": "3.4.13", - "@vue/shared": "3.4.13", - "csstype": "^3.1.3" + "@vue/reactivity": "3.5.27", + "@vue/runtime-core": "3.5.27", + "@vue/shared": "3.5.27", + "csstype": "^3.2.3" } }, "node_modules/@vue/runtime-dom/node_modules/@vue/shared": { - "version": "3.4.13", - "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.4.13.tgz", - "integrity": "sha512-56crFKLPpzk85WXX1L1c0QzPOuoapWlPVys8eMG8kkRmqdMjWUqK8KpFdE2d7BQA4CEbXwyyHPq6MpFr8H9rcg==" + "version": "3.5.27", + "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.5.27.tgz", + "integrity": "sha512-dXr/3CgqXsJkZ0n9F3I4elY8wM9jMJpP3pvRG52r6m0tu/MsAFIe6JpXVGeNMd/D9F4hQynWT8Rfuj0bdm9kFQ==", + "license": "MIT" }, "node_modules/@vue/server-renderer": { - "version": "3.4.5", - "resolved": "https://registry.npmjs.org/@vue/server-renderer/-/server-renderer-3.4.5.tgz", - "integrity": "sha512-jOFc/VE87yvifQpNju12VcqimH8pBLxdcT+t3xMeiED1K6DfH9SORyhFEoZlW5TG2Vwfn3Ul5KE+1aC99xnSBg==", + "version": "3.5.13", + "resolved": "https://registry.npmjs.org/@vue/server-renderer/-/server-renderer-3.5.13.tgz", + "integrity": "sha512-wAi4IRJV/2SAW3htkTlB+dHeRmpTiVIK1OGLWV1yeStVSebSQQOwGwIq0D3ZIoBj2C2qpgz5+vX9iEBkTdk5YA==", "dependencies": { - "@vue/compiler-ssr": 
"3.4.5", - "@vue/shared": "3.4.5" + "@vue/compiler-ssr": "3.5.13", + "@vue/shared": "3.5.13" }, "peerDependencies": { - "vue": "3.4.5" + "vue": "3.5.13" } }, "node_modules/@vue/shared": { - "version": "3.4.5", - "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.4.5.tgz", - "integrity": "sha512-6XptuzlMvN4l4cDnDw36pdGEV+9njYkQ1ZE0Q6iZLwrKefKaOJyiFmcP3/KBDHbt72cJZGtllAc1GaHe6XGAyg==" + "version": "3.5.13", + "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.5.13.tgz", + "integrity": "sha512-/hnE/qP5ZoGpol0a5mDi45bOd7t3tjYJBjsgCsivow7D48cJeV5l05RD82lPqi7gRiphZM37rnhW1l6ZoCNNnQ==" }, "node_modules/@vue/test-utils": { - "version": "2.4.3", - "resolved": "https://registry.npmjs.org/@vue/test-utils/-/test-utils-2.4.3.tgz", - "integrity": "sha512-F4K7mF+ad++VlTrxMJVRnenKSJmO6fkQt2wpRDiKDesQMkfpniGWsqEi/JevxGBo2qEkwwjvTUAoiGJLNx++CA==", + "version": "2.4.6", + "resolved": "https://registry.npmjs.org/@vue/test-utils/-/test-utils-2.4.6.tgz", + "integrity": "sha512-FMxEjOpYNYiFe0GkaHsnJPXFHxQ6m4t8vI/ElPGpMWxZKpmRvQ33OIrvRXemy6yha03RxhOlQuy+gZMC3CQSow==", "dev": true, "dependencies": { "js-beautify": "^1.14.9", - "vue-component-type-helpers": "^1.8.21" - }, - "peerDependencies": { - "@vue/server-renderer": "^3.0.1", - "vue": "^3.0.1" - }, - "peerDependenciesMeta": { - "@vue/server-renderer": { - "optional": true - } + "vue-component-type-helpers": "^2.0.0" } }, + "node_modules/@vue/test-utils/node_modules/vue-component-type-helpers": { + "version": "2.2.12", + "resolved": "https://registry.npmjs.org/vue-component-type-helpers/-/vue-component-type-helpers-2.2.12.tgz", + "integrity": "sha512-YbGqHZ5/eW4SnkPNR44mKVc6ZKQoRs/Rux1sxC6rdwXb4qpbOSYfDr9DsTHolOTGmIKgM9j141mZbBeg05R1pw==", + "dev": true, + "license": "MIT" + }, "node_modules/@vuetify/loader-shared": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@vuetify/loader-shared/-/loader-shared-2.0.1.tgz", - "integrity": 
"sha512-zy5/ohEO7RcJaWYu2Xiy8TBEOkTb42XvWvSAJwXAtY8OlwqyGhzzBp9OvMVjLGIuFXumBpXKlsaVIkeN0OWWSw==", + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@vuetify/loader-shared/-/loader-shared-2.1.2.tgz", + "integrity": "sha512-X+1jBLmXHkpQEnC0vyOb4rtX2QSkBiFhaFXz8yhQqN2A4vQ6k2nChxN4Ol7VAY5KoqMdFoRMnmNdp/1qYXDQig==", "devOptional": true, + "license": "MIT", "dependencies": { "upath": "^2.0.1" }, "peerDependencies": { "vue": "^3.0.0", - "vuetify": "^3.0.0" + "vuetify": ">=3" } }, "node_modules/@vueuse/core": { - "version": "8.9.4", - "resolved": "https://registry.npmjs.org/@vueuse/core/-/core-8.9.4.tgz", - "integrity": "sha512-B/Mdj9TK1peFyWaPof+Zf/mP9XuGAngaJZBwPaXBvU3aCTZlx3ltlrFFFyMV4iGBwsjSCeUCgZrtkEj9dS2Y3Q==", + "version": "13.9.0", + "resolved": "https://registry.npmjs.org/@vueuse/core/-/core-13.9.0.tgz", + "integrity": "sha512-ts3regBQyURfCE2BcytLqzm8+MmLlo5Ln/KLoxDVcsZ2gzIwVNnQpQOL/UKV8alUqjSZOlpFZcRNsLRqj+OzyA==", + "license": "MIT", "dependencies": { - "@types/web-bluetooth": "^0.0.14", - "@vueuse/metadata": "8.9.4", - "@vueuse/shared": "8.9.4", - "vue-demi": "*" + "@types/web-bluetooth": "^0.0.21", + "@vueuse/metadata": "13.9.0", + "@vueuse/shared": "13.9.0" }, "funding": { "url": "https://github.com/sponsors/antfu" }, "peerDependencies": { - "@vue/composition-api": "^1.1.0", - "vue": "^2.6.0 || ^3.2.0" - }, - "peerDependenciesMeta": { - "@vue/composition-api": { - "optional": true - }, - "vue": { - "optional": true - } + "vue": "^3.5.0" } }, - "node_modules/@vueuse/core/node_modules/@vueuse/shared": { - "version": "8.9.4", - "resolved": "https://registry.npmjs.org/@vueuse/shared/-/shared-8.9.4.tgz", - "integrity": "sha512-wt+T30c4K6dGRMVqPddexEVLa28YwxW5OFIPmzUHICjphfAuBFTTdDoyqREZNDOFJZ44ARH1WWQNCUK8koJ+Ag==", - "dependencies": { - "vue-demi": "*" - }, + "node_modules/@vueuse/metadata": { + "version": "13.9.0", + "resolved": "https://registry.npmjs.org/@vueuse/metadata/-/metadata-13.9.0.tgz", + "integrity": 
"sha512-1AFRvuiGphfF7yWixZa0KwjYH8ulyjDCC0aFgrGRz8+P4kvDFSdXLVfTk5xAN9wEuD1J6z4/myMoYbnHoX07zg==", + "license": "MIT", "funding": { "url": "https://github.com/sponsors/antfu" - }, - "peerDependencies": { - "@vue/composition-api": "^1.1.0", - "vue": "^2.6.0 || ^3.2.0" - }, - "peerDependenciesMeta": { - "@vue/composition-api": { - "optional": true - }, - "vue": { - "optional": true - } } }, - "node_modules/@vueuse/core/node_modules/vue-demi": { - "version": "0.14.5", - "resolved": "https://registry.npmjs.org/vue-demi/-/vue-demi-0.14.5.tgz", - "integrity": "sha512-o9NUVpl/YlsGJ7t+xuqJKx8EBGf1quRhCiT6D/J0pfwmk9zUwYkC7yrF4SZCe6fETvSM3UNL2edcbYrSyc4QHA==", - "hasInstallScript": true, - "bin": { - "vue-demi-fix": "bin/vue-demi-fix.js", - "vue-demi-switch": "bin/vue-demi-switch.js" - }, - "engines": { - "node": ">=12" - }, + "node_modules/@vueuse/shared": { + "version": "13.9.0", + "resolved": "https://registry.npmjs.org/@vueuse/shared/-/shared-13.9.0.tgz", + "integrity": "sha512-e89uuTLMh0U5cZ9iDpEI2senqPGfbPRTHM/0AaQkcxnpqjkZqDYP8rpfm7edOz8s+pOCOROEy1PIveSW8+fL5g==", + "license": "MIT", "funding": { "url": "https://github.com/sponsors/antfu" }, "peerDependencies": { - "@vue/composition-api": "^1.0.0-rc.1", - "vue": "^3.0.0-0 || ^2.6.0" - }, - "peerDependenciesMeta": { - "@vue/composition-api": { - "optional": true - } - } - }, - "node_modules/@vueuse/metadata": { - "version": "8.9.4", - "resolved": "https://registry.npmjs.org/@vueuse/metadata/-/metadata-8.9.4.tgz", - "integrity": "sha512-IwSfzH80bnJMzqhaapqJl9JRIiyQU0zsRGEgnxN6jhq7992cPUJIRfV+JHRIZXjYqbwt07E1gTEp0R0zPJ1aqw==", - "funding": { - "url": "https://github.com/sponsors/antfu" + "vue": "^3.5.0" } }, "node_modules/@webassemblyjs/ast": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.6.tgz", - "integrity": "sha512-IN1xI7PwOvLPgjcf180gC1bqn3q/QaOCwYUahIOhbYUu8KA/3tw2RT/T0Gidi1l7Hhj5D/INhJxiICObqpMu4Q==", + "version": "1.12.1", + "resolved": 
"https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.12.1.tgz", + "integrity": "sha512-EKfMUOPRRUTy5UII4qJDGPpqfwjOmZ5jeGFwid9mnoqIFK+e0vqoi1qH56JpmZSzEL53jKnNzScdmftJyG5xWg==", "dev": true, "peer": true, "dependencies": { @@ -2389,9 +3709,9 @@ "peer": true }, "node_modules/@webassemblyjs/helper-buffer": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.6.tgz", - "integrity": "sha512-z3nFzdcp1mb8nEOFFk8DrYLpHvhKC3grJD2ardfKOzmbmJvEf/tPIqCY+sNcwZIY8ZD7IkB2l7/pqhUhqm7hLA==", + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.12.1.tgz", + "integrity": "sha512-nzJwQw99DNDKr9BVCOZcLuJJUlqkJh+kVzVl6Fmq/tI5ZtEyWT1KZMyOXltXLZJmDtvLCDgwsyrkohEtopTXCw==", "dev": true, "peer": true }, @@ -2415,16 +3735,16 @@ "peer": true }, "node_modules/@webassemblyjs/helper-wasm-section": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.6.tgz", - "integrity": "sha512-LPpZbSOwTpEC2cgn4hTydySy1Ke+XEu+ETXuoyvuyezHO3Kjdu90KK95Sh9xTbmjrCsUwvWwCOQQNta37VrS9g==", + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.12.1.tgz", + "integrity": "sha512-Jif4vfB6FJlUlSbgEMHUyk1j234GTNG9dBJ4XJdOySoj518Xj0oGsNi59cUQF4RRMS9ouBUxDDdyBVfPTypa5g==", "dev": true, "peer": true, "dependencies": { - "@webassemblyjs/ast": "1.11.6", - "@webassemblyjs/helper-buffer": "1.11.6", + "@webassemblyjs/ast": "1.12.1", + "@webassemblyjs/helper-buffer": "1.12.1", "@webassemblyjs/helper-wasm-bytecode": "1.11.6", - "@webassemblyjs/wasm-gen": "1.11.6" + "@webassemblyjs/wasm-gen": "1.12.1" } }, "node_modules/@webassemblyjs/ieee754": { @@ -2455,30 +3775,30 @@ "peer": true }, "node_modules/@webassemblyjs/wasm-edit": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.6.tgz", - "integrity": 
"sha512-Ybn2I6fnfIGuCR+Faaz7YcvtBKxvoLV3Lebn1tM4o/IAJzmi9AWYIPWpyBfU8cC+JxAO57bk4+zdsTjJR+VTOw==", + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.12.1.tgz", + "integrity": "sha512-1DuwbVvADvS5mGnXbE+c9NfA8QRcZ6iKquqjjmR10k6o+zzsRVesil54DKexiowcFCPdr/Q0qaMgB01+SQ1u6g==", "dev": true, "peer": true, "dependencies": { - "@webassemblyjs/ast": "1.11.6", - "@webassemblyjs/helper-buffer": "1.11.6", + "@webassemblyjs/ast": "1.12.1", + "@webassemblyjs/helper-buffer": "1.12.1", "@webassemblyjs/helper-wasm-bytecode": "1.11.6", - "@webassemblyjs/helper-wasm-section": "1.11.6", - "@webassemblyjs/wasm-gen": "1.11.6", - "@webassemblyjs/wasm-opt": "1.11.6", - "@webassemblyjs/wasm-parser": "1.11.6", - "@webassemblyjs/wast-printer": "1.11.6" + "@webassemblyjs/helper-wasm-section": "1.12.1", + "@webassemblyjs/wasm-gen": "1.12.1", + "@webassemblyjs/wasm-opt": "1.12.1", + "@webassemblyjs/wasm-parser": "1.12.1", + "@webassemblyjs/wast-printer": "1.12.1" } }, "node_modules/@webassemblyjs/wasm-gen": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.6.tgz", - "integrity": "sha512-3XOqkZP/y6B4F0PBAXvI1/bky7GryoogUtfwExeP/v7Nzwo1QLcq5oQmpKlftZLbT+ERUOAZVQjuNVak6UXjPA==", + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.12.1.tgz", + "integrity": "sha512-TDq4Ojh9fcohAw6OIMXqiIcTq5KUXTGRkVxbSo1hQnSy6lAM5GSdfwWeSxpAo0YzgsgF182E/U0mDNhuA0tW7w==", "dev": true, "peer": true, "dependencies": { - "@webassemblyjs/ast": "1.11.6", + "@webassemblyjs/ast": "1.12.1", "@webassemblyjs/helper-wasm-bytecode": "1.11.6", "@webassemblyjs/ieee754": "1.11.6", "@webassemblyjs/leb128": "1.11.6", @@ -2486,26 +3806,26 @@ } }, "node_modules/@webassemblyjs/wasm-opt": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.6.tgz", - "integrity": 
"sha512-cOrKuLRE7PCe6AsOVl7WasYf3wbSo4CeOk6PkrjS7g57MFfVUF9u6ysQBBODX0LdgSvQqRiGz3CXvIDKcPNy4g==", + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.12.1.tgz", + "integrity": "sha512-Jg99j/2gG2iaz3hijw857AVYekZe2SAskcqlWIZXjji5WStnOpVoat3gQfT/Q5tb2djnCjBtMocY/Su1GfxPBg==", "dev": true, "peer": true, "dependencies": { - "@webassemblyjs/ast": "1.11.6", - "@webassemblyjs/helper-buffer": "1.11.6", - "@webassemblyjs/wasm-gen": "1.11.6", - "@webassemblyjs/wasm-parser": "1.11.6" + "@webassemblyjs/ast": "1.12.1", + "@webassemblyjs/helper-buffer": "1.12.1", + "@webassemblyjs/wasm-gen": "1.12.1", + "@webassemblyjs/wasm-parser": "1.12.1" } }, "node_modules/@webassemblyjs/wasm-parser": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.6.tgz", - "integrity": "sha512-6ZwPeGzMJM3Dqp3hCsLgESxBGtT/OeCvCZ4TA1JUPYgmhAx38tTPR9JaKy0S5H3evQpO/h2uWs2j6Yc/fjkpTQ==", + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.12.1.tgz", + "integrity": "sha512-xikIi7c2FHXysxXe3COrVUPSheuBtpcfhbpFj4gmu7KRLYOzANztwUU0IbsqvMqzuNK2+glRGWCEqZo1WCLyAQ==", "dev": true, "peer": true, "dependencies": { - "@webassemblyjs/ast": "1.11.6", + "@webassemblyjs/ast": "1.12.1", "@webassemblyjs/helper-api-error": "1.11.6", "@webassemblyjs/helper-wasm-bytecode": "1.11.6", "@webassemblyjs/ieee754": "1.11.6", @@ -2514,16 +3834,40 @@ } }, "node_modules/@webassemblyjs/wast-printer": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.11.6.tgz", - "integrity": "sha512-JM7AhRcE+yW2GWYaKeHL5vt4xqee5N2WcezptmgyhNS+ScggqcT1OtXykhAb13Sn5Yas0j2uv9tHgrjwvzAP4A==", + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.12.1.tgz", + "integrity": "sha512-+X4WAlOisVWQMikjbcvY2e0rwPsKQ9F688lksZhBcPycBBuii3O7m8FACbDMWDojpAqvjIncrG8J0XHKyQfVeA==", "dev": 
true, "peer": true, "dependencies": { - "@webassemblyjs/ast": "1.11.6", + "@webassemblyjs/ast": "1.12.1", "@xtuc/long": "4.2.2" } }, + "node_modules/@xterm/addon-attach": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@xterm/addon-attach/-/addon-attach-0.11.0.tgz", + "integrity": "sha512-JboCN0QAY6ZLY/SSB/Zl2cQ5zW1Eh4X3fH7BnuR1NB7xGRhzbqU2Npmpiw/3zFlxDaU88vtKzok44JKi2L2V2Q==", + "license": "MIT", + "peerDependencies": { + "@xterm/xterm": "^5.0.0" + } + }, + "node_modules/@xterm/addon-fit": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/@xterm/addon-fit/-/addon-fit-0.10.0.tgz", + "integrity": "sha512-UFYkDm4HUahf2lnEyHvio51TNGiLK66mqP2JoATy7hRZeXaGMRDr00JiSF7m63vR5WKATF605yEggJKsw0JpMQ==", + "license": "MIT", + "peerDependencies": { + "@xterm/xterm": "^5.0.0" + } + }, + "node_modules/@xterm/xterm": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@xterm/xterm/-/xterm-5.5.0.tgz", + "integrity": "sha512-hqJHYaQb5OptNunnyAnkHyM8aCjZ1MEIDTQu1iIbbTD/xops91NB5yq1ZK/dC2JDbVWtF23zUtl9JE2NqwT87A==", + "license": "MIT" + }, "node_modules/@xtuc/ieee754": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz", @@ -2538,12 +3882,6 @@ "dev": true, "peer": true }, - "node_modules/abab": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/abab/-/abab-2.0.6.tgz", - "integrity": "sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA==", - "dev": true - }, "node_modules/abbrev": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", @@ -2563,9 +3901,10 @@ } }, "node_modules/acorn": { - "version": "8.11.3", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.3.tgz", - "integrity": "sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==", + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": 
"sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", + "license": "MIT", "bin": { "acorn": "bin/acorn" }, @@ -2573,20 +3912,10 @@ "node": ">=0.4.0" } }, - "node_modules/acorn-globals": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/acorn-globals/-/acorn-globals-7.0.1.tgz", - "integrity": "sha512-umOSDSDrfHbTNPuNpC2NSnnA3LUrqpevPb4T9jRx4MagXNS0rs+gwiTcAvqCRmsD6utzsrzNt+ebm00SNWiC3Q==", - "dev": true, - "dependencies": { - "acorn": "^8.1.0", - "acorn-walk": "^8.0.2" - } - }, - "node_modules/acorn-import-assertions": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz", - "integrity": "sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA==", + "node_modules/acorn-import-attributes": { + "version": "1.9.5", + "resolved": "https://registry.npmjs.org/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz", + "integrity": "sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ==", "dev": true, "peer": true, "peerDependencies": { @@ -2597,35 +3926,26 @@ "version": "5.3.2", "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "license": "MIT", "peerDependencies": { "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, - "node_modules/acorn-walk": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz", - "integrity": "sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==", - "dev": true, - "engines": { - "node": ">=0.4.0" - } - }, "node_modules/agent-base": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", - "integrity": 
"sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz", + "integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==", "dev": true, - "dependencies": { - "debug": "4" - }, "engines": { - "node": ">= 6.0.0" + "node": ">= 14" } }, "node_modules/ajv": { "version": "6.12.6", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", @@ -2646,18 +3966,18 @@ "ajv": "^6.9.1" } }, - "node_modules/amdefine": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/amdefine/-/amdefine-1.0.1.tgz", - "integrity": "sha512-S2Hw0TtNkMJhIabBwIojKL9YHO5T0n5eNqWJ7Lrlel/zDbftQpxpapi8tZs3X1HWa+u+QeydGmzzNU0m09+Rcg==", - "engines": { - "node": ">=0.4.2" - } + "node_modules/alien-signals": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/alien-signals/-/alien-signals-3.1.2.tgz", + "integrity": "sha512-d9dYqZTS90WLiU0I5c6DHj/HcKkF8ZyGN3G5x8wSbslulz70KOxaqCT0hQCo9KOyhVqzqGojvNdJXoTumZOtcw==", + "dev": true, + "license": "MIT" }, "node_modules/ansi-regex": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, "engines": { "node": ">=8" } @@ -2666,6 +3986,7 @@ "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, "dependencies": { "color-convert": "^2.0.1" }, @@ -2676,16 +3997,16 @@ "url": 
"https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/anymatch": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", - "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "node_modules/ansi-styles/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, "dependencies": { - "normalize-path": "^3.0.0", - "picomatch": "^2.0.4" + "color-name": "~1.1.4" }, "engines": { - "node": ">= 8" + "node": ">=7.0.0" } }, "node_modules/argparse": { @@ -2693,21 +4014,18 @@ "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" }, - "node_modules/aria-query": { - "version": "5.1.3", - "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.1.3.tgz", - "integrity": "sha512-R5iJ5lkuHybztUfuOAznmboyjWq8O6sqNqtK7CLOqdydi54VNbORp49mb14KbWgG1QD3JFO9hJdZ+y4KutfdOQ==", - "dependencies": { - "deep-equal": "^2.0.5" - } - }, "node_modules/array-buffer-byte-length": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz", - "integrity": "sha512-LPuwb2P+NrQw3XhxGc36+XSvuBPopovXYTR9Ew++Du9Yb/bx5AzBfrIsBoj0EZUifjQU+sHL21sseZ3jerWO/A==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.2.tgz", + "integrity": "sha512-LHE+8BuR7RYGDKvnrmcuSq3tDcKv9OFEXQt/HpbZhY7V6h0zlUXutnAD82GiFx9rdieCMjkvtcsPqBwgUl1Iiw==", + "dev": true, + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", - "is-array-buffer": "^3.0.1" + "call-bound": "^1.0.3", + "is-array-buffer": "^3.0.5" + }, + "engines": { + "node": 
">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -2719,15 +4037,20 @@ "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==" }, "node_modules/array-includes": { - "version": "3.1.6", - "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.6.tgz", - "integrity": "sha512-sgTbLvL6cNnw24FnbaDyjmvddQ2ML8arZsgaJhoABMoplz/4QRhtrYS+alr1BUM1Bwp6dhx8vVCBSLG+StwOFw==", + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.9.tgz", + "integrity": "sha512-FmeCCAenzH0KH381SPT5FZmiA/TmpndpcaShhfgEN9eCVjnFBqq3l1xrI42y8+PPLI6hypzou4GXw00WHmPBLQ==", + "dev": true, + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "get-intrinsic": "^1.1.3", - "is-string": "^1.0.7" + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "define-properties": "^1.2.1", + "es-abstract": "^1.24.0", + "es-object-atoms": "^1.1.1", + "get-intrinsic": "^1.3.0", + "is-string": "^1.1.1", + "math-intrinsics": "^1.1.0" }, "engines": { "node": ">= 0.4" @@ -2736,23 +4059,39 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/array-union": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", - "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "node_modules/array.prototype.findlastindex": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.6.tgz", + "integrity": "sha512-F/TKATkzseUExPlfvmwQKGITM3DGTK+vkAsCZoDc5daVygbJBnjEUCbgkAvVFsgfXfX4YIqZ/27G3k3tdXrTxQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.9", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + 
"es-shim-unscopables": "^1.1.0" + }, "engines": { - "node": ">=8" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/array.prototype.flat": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.1.tgz", - "integrity": "sha512-roTU0KWIOmJ4DRLmwKd19Otg0/mT3qPNt0Qb3GWW8iObuZXxrjB/pzn0R3hqpRSWg4HCwqx+0vwOnWnvlOyeIA==", + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.3.tgz", + "integrity": "sha512-rwG/ja1neyLqCuGZ5YYrznA62D4mZXg0i1cIskIUKSiqF3Cje9/wXAls9B9s1Wa2fomMsIv8czB8jZcPmxCXFg==", + "dev": true, + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "es-shim-unscopables": "^1.0.0" + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.5", + "es-shim-unscopables": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -2762,14 +4101,16 @@ } }, "node_modules/array.prototype.flatmap": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.1.tgz", - "integrity": "sha512-8UGn9O1FDVvMNB0UlLv4voxRMze7+FpHyF5mSMRjWHUMlpoDViniy05870VlxhfgTnLbpuwTzvD76MTtWxB/mQ==", + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.3.tgz", + "integrity": "sha512-Y7Wt51eKJSyi80hFrJCePGGNo5ktJCslFuboqJsbf57CCPcm5zztluPlc4/aD8sWsKvlwatezpV4U1efk8kpjg==", + "dev": true, + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "es-shim-unscopables": "^1.0.0" + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.5", + "es-shim-unscopables": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -2778,22 +4119,39 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/array.prototype.tosorted": { - "version": 
"1.1.1", - "resolved": "https://registry.npmjs.org/array.prototype.tosorted/-/array.prototype.tosorted-1.1.1.tgz", - "integrity": "sha512-pZYPXPRl2PqWcsUs6LOMn+1f1532nEoPTYowBtqLwAW+W8vSVhkIGnmOX1t/UQjD6YGI0vcD2B1U7ZFGQH9jnQ==", + "node_modules/arraybuffer.prototype.slice": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.4.tgz", + "integrity": "sha512-BNoCY6SXXPQ7gF2opIP4GBE+Xw7U+pHMYKuzjgCN3GwiaIR09UUeKfheyIry77QtrCBlC0KK0q5/TER/tYh3PQ==", + "dev": true, + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "es-shim-unscopables": "^1.0.0", - "get-intrinsic": "^1.1.3" + "array-buffer-byte-length": "^1.0.1", + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.5", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "is-array-buffer": "^3.0.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/asap": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", - "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==" + "node_modules/asciinema-player": { + "version": "3.14.0", + "resolved": "https://registry.npmjs.org/asciinema-player/-/asciinema-player-3.14.0.tgz", + "integrity": "sha512-44m3CpNavn8i7DSr/AeeV+rJpHpcqc/OCildCs9FAu5gnXB6XNBdbhfg6mHMG4uU3R1rxFNA3ZRTt8FMhHC48Q==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@babel/runtime": "^7.21.0", + "solid-js": "^1.3.0", + "solid-transition-group": "^0.2.3" + } }, "node_modules/asn1": { "version": "0.2.6", @@ -2840,36 +4198,38 @@ } }, "node_modules/assertion-error": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", - "integrity": 
"sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz", + "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==", "dev": true, + "license": "MIT", "engines": { - "node": "*" + "node": ">=12" } }, - "node_modules/ast-types": { - "version": "0.9.6", - "resolved": "https://registry.npmjs.org/ast-types/-/ast-types-0.9.6.tgz", - "integrity": "sha512-qEdtR2UH78yyHX/AUNfXmJTlM48XoFZKBdwi1nzkI1mJL21cmbu0cvjxjpkXJ5NENMq42H+hNs8VLJcqXLerBQ==", + "node_modules/async-function": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/async-function/-/async-function-1.0.0.tgz", + "integrity": "sha512-hsU18Ae8CDTR6Kgu9DYf0EbCr/a5iGL0rytQDobUcdpYOKokk8LEjVphnXkDkgpi0wYVsqrXuP0bZxJaTqdgoA==", + "dev": true, + "license": "MIT", "engines": { - "node": ">= 0.8" + "node": ">= 0.4" } }, - "node_modules/ast-types-flow": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/ast-types-flow/-/ast-types-flow-0.0.7.tgz", - "integrity": "sha512-eBvWn1lvIApYMhzQMsu9ciLfkBY499mFZlNqG+/9WR7PVlroQw0vG30cOQQbaKz3sCEc44TAOu2ykzqXSNnwag==" - }, "node_modules/asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" }, "node_modules/available-typed-arrays": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz", - "integrity": "sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", + "integrity": 
"sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", + "license": "MIT", + "dependencies": { + "possible-typed-array-names": "^1.0.0" + }, "engines": { "node": ">= 0.4" }, @@ -2877,21 +4237,14 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/axe-core": { - "version": "4.7.1", - "resolved": "https://registry.npmjs.org/axe-core/-/axe-core-4.7.1.tgz", - "integrity": "sha512-sCXXUhA+cljomZ3ZAwb8i1p3oOlkABzPy08ZDAoGcYuvtBPlQ1Ytde129ArXyHWDhfeewq7rlx9F+cUx2SSlkg==", - "engines": { - "node": ">=4" - } - }, "node_modules/axios": { - "version": "1.6.5", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.5.tgz", - "integrity": "sha512-Ii012v05KEVuUoFWmMW/UQv9aRIc3ZwkWDcM+h5Il8izZCtRVpDUfwpoFf7eOtajT3QiGR4yDUx7lPqHJULgbg==", + "version": "1.13.3", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.13.3.tgz", + "integrity": "sha512-ERT8kdX7DZjtUm7IitEyV7InTHAF42iJuMArIiDIV5YtPanJkgw4hw5Dyg9fh0mihdWNn1GKaeIWErfe56UQ1g==", + "license": "MIT", "dependencies": { - "follow-redirects": "^1.15.4", - "form-data": "^4.0.0", + "follow-redirects": "^1.15.6", + "form-data": "^4.0.4", "proxy-from-env": "^1.1.0" } }, @@ -2908,26 +4261,11 @@ "axios": ">= 0.17.0" } }, - "node_modules/axobject-query": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-3.1.1.tgz", - "integrity": "sha512-goKlv8DZrK9hUh975fnHzhNIO4jUnFCfv/dszV5VwUGDFjI6vQ2VwoyjYjYNEbBE8AH87TduWP5uyDR1D+Iteg==", - "dependencies": { - "deep-equal": "^2.0.5" - } - }, "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" - }, - "node_modules/base62": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/base62/-/base62-1.2.8.tgz", - "integrity": 
"sha512-V6YHUbjLxN1ymqNLb1DPHoU1CpfdL7d2YTIp5W3U4hhoG4hhxNmsFDs66M9EXxBiSEke5Bt5dwdfMwwZF70iLA==", - "engines": { - "node": "*" - } + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true }, "node_modules/base64-js": { "version": "1.5.1", @@ -2966,40 +4304,35 @@ "node": "*" } }, - "node_modules/binary-extensions": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", - "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", - "engines": { - "node": ">=8" + "node_modules/birpc": { + "version": "2.8.0", + "resolved": "https://registry.npmjs.org/birpc/-/birpc-2.8.0.tgz", + "integrity": "sha512-Bz2a4qD/5GRhiHSwj30c/8kC8QGj12nNDwz3D4ErQ4Xhy35dsSDvF+RA/tWpjyU0pdGtSDiEk6B5fBGE1qNVhw==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/antfu" } }, - "node_modules/blueimp-md5": { - "version": "2.19.0", - "resolved": "https://registry.npmjs.org/blueimp-md5/-/blueimp-md5-2.19.0.tgz", - "integrity": "sha512-DRQrD6gJyy8FbiE4s+bDoXS9hiW3Vbx5uCdwvcCf3zLHL+Iv7LtGHLpr+GZV8rHG8tK766FGYBwRbu8pELTt+w==", - "dev": true - }, "node_modules/bn.js": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.1.tgz", "integrity": "sha512-eXRvHzWyYPBuB4NBy0cmYQjGitUrtqwbvlzP3G6VFnNRbsZQIxQ10PbKKHt8gZ/HW/D/747aDl+QkDqg3KQLMQ==" }, "node_modules/body-parser": { - "version": "1.20.1", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz", - "integrity": "sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==", + "version": "1.20.3", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", + "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", "dependencies": { "bytes": "3.1.2", - "content-type": "~1.0.4", + 
"content-type": "~1.0.5", "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "on-finished": "2.4.1", - "qs": "6.11.0", - "raw-body": "2.5.1", + "qs": "6.13.0", + "raw-body": "2.5.2", "type-is": "~1.6.18", "unpipe": "1.0.0" }, @@ -3024,23 +4357,25 @@ "node_modules/boolbase": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", - "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==" + "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==", + "dev": true }, "node_modules/brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", "dependencies": { - "fill-range": "^7.0.1" + "fill-range": "^7.1.1" }, "engines": { "node": ">=8" @@ -3051,6 +4386,15 @@ "resolved": "https://registry.npmjs.org/brorand/-/brorand-1.1.0.tgz", "integrity": "sha512-cKV8tMCEpQs4hK/ik71d6LrPOnpkpGBR0wzxqr68g2m/LB2GxVYQroAjMJZRVM1Y4BCjCKc3vAamxSzOY2RP+w==" }, + "node_modules/browser-resolve": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/browser-resolve/-/browser-resolve-2.0.0.tgz", + "integrity": 
"sha512-7sWsQlYL2rGLy2IWm8WL8DCTJvYLc/qlOnsakDac87SOoCd16WLsaAMdCiAqsTNHIe+SXfaqyxyo6THoWqs8WQ==", + "dev": true, + "dependencies": { + "resolve": "^1.17.0" + } + }, "node_modules/browserify-aes": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/browserify-aes/-/browserify-aes-1.2.0.tgz", @@ -3113,11 +4457,19 @@ "node": ">= 4" } }, - "node_modules/browserslist": { - "version": "4.21.5", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.5.tgz", - "integrity": "sha512-tUkiguQGW7S3IhB7N+c2MV/HZPSCPAAiYBZXLsBhFB/PCy6ZKKsZrmBayHV9fdGV/ARIfJ14NkxKzRDjvp7L6w==", + "node_modules/browserify-zlib": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/browserify-zlib/-/browserify-zlib-0.2.0.tgz", + "integrity": "sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA==", "dev": true, + "dependencies": { + "pako": "~1.0.5" + } + }, + "node_modules/browserslist": { + "version": "4.24.2", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.2.tgz", + "integrity": "sha512-ZIc+Q62revdMcqC6aChtW4jz3My3klmCO1fEmINZY/8J3EpBg5/A/D0AKmBveUh6pgoeycoMkVMko84tuYS+Gg==", "funding": [ { "type": "opencollective", @@ -3126,14 +4478,17 @@ { "type": "tidelift", "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" } ], - "peer": true, "dependencies": { - "caniuse-lite": "^1.0.30001449", - "electron-to-chromium": "^1.4.284", - "node-releases": "^2.0.8", - "update-browserslist-db": "^1.0.10" + "caniuse-lite": "^1.0.30001669", + "electron-to-chromium": "^1.5.41", + "node-releases": "^2.0.18", + "update-browserslist-db": "^1.1.1" }, "bin": { "browserslist": "cli.js" @@ -3177,17 +4532,11 @@ "resolved": "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz", "integrity": "sha512-571s0T7nZWK6vB67HI5dyUF7wXiNcfaPPPTl6zYCNApANjIvYJTg7hlud/+cJpdAhS7dVzqMLmfhfHR3rAcOjQ==" }, - "node_modules/builtin-modules": { 
- "version": "3.3.0", - "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.3.0.tgz", - "integrity": "sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw==", - "dev": true, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } + "node_modules/builtin-status-codes": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz", + "integrity": "sha512-HpGFw18DgFWlncDfjTa2rcQ4W88O1mC8e8yZ2AvQY5KDaktSTwo+KRf6nHK6FRI5FyRyb/5T6+TSxfP7QyGsmQ==", + "dev": true }, "node_modules/bytes": { "version": "3.1.2", @@ -3197,32 +4546,38 @@ "node": ">= 0.8" } }, - "node_modules/c8": { - "version": "7.13.0", - "resolved": "https://registry.npmjs.org/c8/-/c8-7.13.0.tgz", - "integrity": "sha512-/NL4hQTv1gBL6J6ei80zu3IiTrmePDKXKXOTLpHvcIWZTVYQlDhVWjjWvkhICylE8EwwnMVzDZugCvdx0/DIIA==", - "dev": true, - "dependencies": { - "@bcoe/v8-coverage": "^0.2.3", - "@istanbuljs/schema": "^0.1.3", - "find-up": "^5.0.0", - "foreground-child": "^2.0.0", - "istanbul-lib-coverage": "^3.2.0", - "istanbul-lib-report": "^3.0.0", - "istanbul-reports": "^3.1.4", - "rimraf": "^3.0.2", - "test-exclude": "^6.0.0", - "v8-to-istanbul": "^9.0.0", - "yargs": "^16.2.0", - "yargs-parser": "^20.2.9" + "node_modules/c12": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/c12/-/c12-2.0.2.tgz", + "integrity": "sha512-NkvlL5CHZt9kPswJYDCUYtTaMt7JOfcpsnNncfj7sWsc13x6Wz+GiTpBtqZOojFlzyTHui8+OAfR6praV6PYaQ==", + "dependencies": { + "chokidar": "^4.0.3", + "confbox": "^0.1.8", + "defu": "^6.1.4", + "dotenv": "^16.4.7", + "giget": "^1.2.4", + "jiti": "^2.4.2", + "mlly": "^1.7.4", + "ohash": "^1.1.4", + "pathe": "^2.0.2", + "perfect-debounce": "^1.0.0", + "pkg-types": "^1.3.1", + "rc9": "^2.1.2" }, - "bin": { - "c8": "bin/c8.js" + "peerDependencies": { + "magicast": "^0.3.5" }, - "engines": { - "node": ">=10.12.0" + 
"peerDependenciesMeta": { + "magicast": { + "optional": true + } } }, + "node_modules/c12/node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==" + }, "node_modules/cac": { "version": "6.7.14", "resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz", @@ -3232,12 +4587,46 @@ } }, "node_modules/call-bind": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz", + "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.0", + "es-define-property": "^1.0.0", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/call-bind-apply-helpers": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", - "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", + "license": "MIT", "dependencies": { - "function-bind": "^1.1.1", - "get-intrinsic": "^1.0.2" + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" + }, + 
"engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -3247,15 +4636,16 @@ "version": "3.1.0", "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "license": "MIT", "engines": { "node": ">=6" } }, "node_modules/caniuse-lite": { - "version": "1.0.30001489", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001489.tgz", - "integrity": "sha512-x1mgZEXK8jHIfAxm+xgdpHpk50IN3z3q3zP261/WS+uvePxW8izXuCu6AHz0lkuYTlATDehiZ/tNyYBdSQsOUQ==", - "dev": true, + "version": "1.0.30001680", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001680.tgz", + "integrity": "sha512-rPQy70G6AGUMnbwS1z6Xg+RkHYPAi18ihs47GH0jcxIG7wArmPgY3XbS2sRdBbxJljp3thdT8BIqv9ccCypiPA==", "funding": [ { "type": "opencollective", @@ -3269,31 +4659,30 @@ "type": "github", "url": "https://github.com/sponsors/ai" } - ], - "peer": true + ] }, "node_modules/chai": { - "version": "4.3.7", - "resolved": "https://registry.npmjs.org/chai/-/chai-4.3.7.tgz", - "integrity": "sha512-HLnAzZ2iupm25PlN0xFreAlBA5zaBSv3og0DdeGA4Ar6h6rJ3A0rolRUKJhSF2V10GZKDgWF/VmAEsNWjCRB+A==", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/chai/-/chai-5.1.2.tgz", + "integrity": "sha512-aGtmf24DW6MLHHG5gCx4zaI3uBq3KRtxeVs0DjFH6Z0rDNbsvTxFASFvdj79pxjxZ8/5u3PIiN3IwEIQkiiuPw==", "dev": true, + "license": "MIT", "dependencies": { - "assertion-error": "^1.1.0", - "check-error": "^1.0.2", - "deep-eql": "^4.1.2", - "get-func-name": "^2.0.0", - "loupe": "^2.3.1", - "pathval": "^1.1.1", - "type-detect": "^4.0.5" + "assertion-error": "^2.0.1", + "check-error": "^2.1.1", + "deep-eql": "^5.0.1", + "loupe": "^3.1.0", + "pathval": "^2.0.0" }, "engines": { - "node": ">=4" + "node": ">=12" } }, "node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", 
"integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -3306,49 +4695,35 @@ } }, "node_modules/check-error": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", - "integrity": "sha512-BrgHpW9NURQgzoNyjfq0Wu6VFO6D7IZEmJNdtgNqpzGG8RuNFHt2jQxWlAs4HMe119chBnv+34syEZtc6IhLtA==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-2.1.1.tgz", + "integrity": "sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw==", "dev": true, + "license": "MIT", "engines": { - "node": "*" + "node": ">= 16" } }, "node_modules/chokidar": { - "version": "3.5.3", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", - "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", - "funding": [ - { - "type": "individual", - "url": "https://paulmillr.com/funding/" - } - ], + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz", + "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==", "dependencies": { - "anymatch": "~3.1.2", - "braces": "~3.0.2", - "glob-parent": "~5.1.2", - "is-binary-path": "~2.1.0", - "is-glob": "~4.0.1", - "normalize-path": "~3.0.0", - "readdirp": "~3.6.0" + "readdirp": "^4.0.1" }, "engines": { - "node": ">= 8.10.0" + "node": ">= 14.16.0" }, - "optionalDependencies": { - "fsevents": "~2.3.2" + "funding": { + "url": "https://paulmillr.com/funding/" } }, - "node_modules/chokidar/node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dependencies": { 
- "is-glob": "^4.0.1" - }, + "node_modules/chownr": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", + "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", "engines": { - "node": ">= 6" + "node": ">=10" } }, "node_modules/chrome-trace-event": { @@ -3362,40 +4737,31 @@ } }, "node_modules/cipher-base": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/cipher-base/-/cipher-base-1.0.4.tgz", - "integrity": "sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q==", - "dependencies": { - "inherits": "^2.0.1", - "safe-buffer": "^5.0.1" - } - }, - "node_modules/cliui": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", - "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", - "dev": true, + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cipher-base/-/cipher-base-1.0.6.tgz", + "integrity": "sha512-3Ek9H3X6pj5TgenXYtNWdaBon1tgYCaebd+XPg0keyjEbEfkD4KkmAxkQ/i1vYvxdcT5nscLBfq9VJRmCBcFSw==", + "license": "MIT", "dependencies": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.0", - "wrap-ansi": "^7.0.0" + "inherits": "^2.0.4", + "safe-buffer": "^5.2.1" + }, + "engines": { + "node": ">= 0.10" } }, - "node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "node_modules/citty": { + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/citty/-/citty-0.1.6.tgz", + "integrity": "sha512-tskPPKEs8D2KPafUypv2gxwJP8h/OaJmC82QQGGDQcHvXX43xF2VDACcJVmZ0EuSxkpO9Kc4MlrA3q0+FG58AQ==", "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" + "consola": "^3.2.3" } }, "node_modules/color-name": { 
"version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true }, "node_modules/combined-stream": { "version": "1.0.8", @@ -3411,74 +4777,19 @@ "node_modules/commander": { "version": "2.20.3", "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", - "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" - }, - "node_modules/commoner": { - "version": "0.10.8", - "resolved": "https://registry.npmjs.org/commoner/-/commoner-0.10.8.tgz", - "integrity": "sha512-3/qHkNMM6o/KGXHITA14y78PcfmXh4+AOCJpSoF73h4VY1JpdGv3CHMS5+JW6SwLhfJt4RhNmLAa7+RRX/62EQ==", - "dependencies": { - "commander": "^2.5.0", - "detective": "^4.3.1", - "glob": "^5.0.15", - "graceful-fs": "^4.1.2", - "iconv-lite": "^0.4.5", - "mkdirp": "^0.5.0", - "private": "^0.1.6", - "q": "^1.1.2", - "recast": "^0.11.17" - }, - "bin": { - "commonize": "bin/commonize" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/commoner/node_modules/glob": { - "version": "5.0.15", - "resolved": "https://registry.npmjs.org/glob/-/glob-5.0.15.tgz", - "integrity": "sha512-c9IPMazfRITpmAAKi22dK1VKxGDX9ehhqfABDriL/lzO92xcUKEJPQHrVA/2YHSNFB4iFlykVmWvwo48nr3OxA==", - "dependencies": { - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "2 || 3", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - } - }, - "node_modules/computeds": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/computeds/-/computeds-0.0.1.tgz", - "integrity": "sha512-7CEBgcMjVmitjYo5q8JTJVra6X5mQ20uTThdK+0kR7UEaDrAWEQcRiBtWJzga4eRpP6afNwwLsX2SET2JhVB1Q==", - "dev": true + "integrity": 
"sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", + "devOptional": true }, "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true }, - "node_modules/concordance": { - "version": "5.0.4", - "resolved": "https://registry.npmjs.org/concordance/-/concordance-5.0.4.tgz", - "integrity": "sha512-OAcsnTEYu1ARJqWVGwf4zh4JDfHZEaSNlNccFmt8YjB2l/n19/PF2viLINHc57vO4FKIAFl2FWASIGZZWZ2Kxw==", - "dev": true, - "dependencies": { - "date-time": "^3.1.0", - "esutils": "^2.0.3", - "fast-diff": "^1.2.0", - "js-string-escape": "^1.0.1", - "lodash": "^4.17.15", - "md5-hex": "^3.0.1", - "semver": "^7.3.2", - "well-known-symbols": "^2.0.0" - }, - "engines": { - "node": ">=10.18.0 <11 || >=12.14.0 <13 || >=14" - } + "node_modules/confbox": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.1.8.tgz", + "integrity": "sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==" }, "node_modules/config-chain": { "version": "1.1.13", @@ -3490,10 +4801,25 @@ "proto-list": "~1.2.1" } }, - "node_modules/confusing-browser-globals": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/confusing-browser-globals/-/confusing-browser-globals-1.0.11.tgz", - "integrity": "sha512-JsPKdmh8ZkmnHxDk55FZ1TqVLvEQTvoByJZRN9jzI0UjxK/QgAmsphz7PGtqgPieQZ/CQcHWXCR7ATDNhGe+YA==" + "node_modules/consola": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/consola/-/consola-3.4.0.tgz", + "integrity": "sha512-EiPU8G6dQG0GFHNR8ljnZFki/8a+cQwEQ+7wpxdChl02Q8HXlwEZWD5lqAF8vC2sEC3Tehr8hy7vErz88LHyUA==", + "engines": { + "node": "^14.18.0 || >=16.10.0" + } + }, + 
"node_modules/console-browserify": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/console-browserify/-/console-browserify-1.2.0.tgz", + "integrity": "sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA==", + "dev": true + }, + "node_modules/constants-browserify": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/constants-browserify/-/constants-browserify-1.0.0.tgz", + "integrity": "sha512-xFxOwqIzR/e1k1gLiWEophSCMqXcwVHIH7akf7b/vxcUeGunlj3hvZaaqxwHsTgn+IndtkQJgSztIDWeumWJDQ==", + "dev": true }, "node_modules/content-disposition": { "version": "0.5.4", @@ -3515,15 +4841,14 @@ } }, "node_modules/convert-source-map": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz", - "integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==", - "dev": true + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==" }, "node_modules/cookie": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz", - "integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==", + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", + "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==", "engines": { "node": ">= 0.6" } @@ -3533,11 +4858,20 @@ "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==" }, - "node_modules/core-js": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-1.2.7.tgz", - 
"integrity": "sha512-ZiPp9pZlgxpWRu0M+YWbm6+aQ84XEfH1JRXvfOc/fILWI0VKhLC2LX13X1NYq4fULzLMq7Hfh43CSo2/aIaUPA==", - "deprecated": "core-js@<3.23.3 is no longer maintained and not recommended for usage due to the number of issues. Because of the V8 engine whims, feature detection in old core-js versions could cause a slowdown up to 100x even if nothing is polyfilled. Some versions have web compatibility issues. Please, upgrade your dependencies to the actual version of core-js." + "node_modules/copy-anything": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/copy-anything/-/copy-anything-4.0.5.tgz", + "integrity": "sha512-7Vv6asjS4gMOuILabD3l739tsaxFQmC+a7pLZm02zyvs8p977bL3zEgq3yDk5rn9B0PbYgIv++jmHcuUab4RhA==", + "license": "MIT", + "dependencies": { + "is-what": "^5.2.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/mesqueeb" + } }, "node_modules/create-ecdh": { "version": "4.0.4", @@ -3578,19 +4912,17 @@ "sha.js": "^2.4.8" } }, - "node_modules/create-react-class": { - "version": "15.7.0", - "resolved": "https://registry.npmjs.org/create-react-class/-/create-react-class-15.7.0.tgz", - "integrity": "sha512-QZv4sFWG9S5RUvkTYWbflxeZX+JG7Cz0Tn33rQBJ+WFQTqTfUTjMjiv9tnfXazjsO5r0KhPs+AqCjyrQX6h2ng==", - "dependencies": { - "loose-envify": "^1.3.1", - "object-assign": "^4.1.1" - } + "node_modules/create-require": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", + "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", + "dev": true }, "node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": 
"sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", @@ -3625,6 +4957,7 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", + "dev": true, "bin": { "cssesc": "bin/cssesc" }, @@ -3639,26 +4972,23 @@ "dev": true }, "node_modules/cssstyle": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-3.0.0.tgz", - "integrity": "sha512-N4u2ABATi3Qplzf0hWbVCdjenim8F3ojEXpBDF5hBpjzW182MjNGLqfmQ0SkSPeQ+V86ZXgeH8aXj6kayd4jgg==", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-4.3.0.tgz", + "integrity": "sha512-6r0NiY0xizYqfBvWp1G7WXJ06/bZyrk7Dc6PHql82C/pKGUTKu4yAX4Y8JPamb1ob9nBKuxWzCGTRuGwU3yxJQ==", "dev": true, "dependencies": { - "rrweb-cssom": "^0.6.0" + "@asamuzakjp/css-color": "^3.1.1", + "rrweb-cssom": "^0.8.0" }, "engines": { - "node": ">=14" + "node": ">=18" } }, "node_modules/csstype": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", - "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==" - }, - "node_modules/damerau-levenshtein": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz", - "integrity": "sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA==" + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz", + "integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==", + "license": "MIT" }, "node_modules/dashdash": { "version": "1.14.1", @@ -3672,43 +5002,79 @@ } }, "node_modules/data-urls": { - "version": "4.0.0", - "resolved": 
"https://registry.npmjs.org/data-urls/-/data-urls-4.0.0.tgz", - "integrity": "sha512-/mMTei/JXPqvFqQtfyTowxmJVwr2PVAeCcDxyFf6LhoOu/09TX2OX3kb2wzi4DMXcfj4OItwDOnhl5oziPnT6g==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-5.0.0.tgz", + "integrity": "sha512-ZYP5VBHshaDAiVZxjbRVcFJpc+4xGgT0bK3vzy1HLN8jTO975HEbuYzZJcHoQEY5K1a0z8YayJkyVETa08eNTg==", "dev": true, "dependencies": { - "abab": "^2.0.6", - "whatwg-mimetype": "^3.0.0", - "whatwg-url": "^12.0.0" + "whatwg-mimetype": "^4.0.0", + "whatwg-url": "^14.0.0" }, "engines": { - "node": ">=14" + "node": ">=18" } }, - "node_modules/date-time": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/date-time/-/date-time-3.1.0.tgz", - "integrity": "sha512-uqCUKXE5q1PNBXjPqvwhwJf9SwMoAHBgWJ6DcrnS5o+W2JOiIILl0JEdVD8SGujrNS02GGxgwAg2PN2zONgtjg==", + "node_modules/data-view-buffer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.2.tgz", + "integrity": "sha512-EmKO5V3OLXh1rtK2wgXRansaK1/mtVdTUEiEI0W8RkvgT05kfxaH29PliLnpLP73yYO6142Q72QNa8Wx/A5CqQ==", "dev": true, + "license": "MIT", "dependencies": { - "time-zone": "^1.0.0" + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.2" }, "engines": { - "node": ">=6" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/de-indent": { + "node_modules/data-view-byte-length": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/de-indent/-/de-indent-1.0.2.tgz", - "integrity": "sha512-e/1zu3xH5MQryN2zdVaF0OrdNLUbvWxzMbi+iNA6Bky7l1RoP8a2fIbRocyHclXt/arDrrR6lL3TqFD9pMQTsg==", - "dev": true + "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.2.tgz", + "integrity": "sha512-tuhGbE6CfTM9+5ANGf+oQb72Ky/0+s3xKUpHvShfiz2RxMFgFPjsXuRLBVMtvMs15awe45SRb83D6wH4ew6wlQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "es-errors": 
"^1.3.0", + "is-data-view": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/inspect-js" + } + }, + "node_modules/data-view-byte-offset": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.1.tgz", + "integrity": "sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } }, "node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "license": "MIT", "dependencies": { - "ms": "2.1.2" + "ms": "^2.1.3" }, "engines": { "node": ">=6.0" @@ -3720,55 +5086,27 @@ } }, "node_modules/decimal.js": { - "version": "10.4.3", - "resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.4.3.tgz", - "integrity": "sha512-VBBaLc1MgL5XpzgIP7ny5Z6Nx3UrRkIViUkPUdtl9aya5amy3De1gsUUSB1g3+3sExYNjCAsAznmukyxCb1GRA==", - "dev": true + "version": "10.5.0", + "resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.5.0.tgz", + "integrity": "sha512-8vDa8Qxvr/+d94hSh5P3IJwI5t8/c0KsMp+g8bNw9cY2icONa5aPfvKeieW1WlG0WQYwwhJ7mjui2xtiePQSXw==", + "dev": true, + "license": "MIT" }, "node_modules/deep-eql": { - "version": "4.1.3", - "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-4.1.3.tgz", - "integrity": "sha512-WaEtAOpRA1MQ0eohqZjpGD8zdI0Ovsm8mmFhaDN8dvDZzyoUMcYDnf5Y6iu7HTXxf8JDS23qWa4a+hKCDyOPzw==", + 
"version": "5.0.2", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-5.0.2.tgz", + "integrity": "sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==", "dev": true, - "dependencies": { - "type-detect": "^4.0.0" - }, + "license": "MIT", "engines": { "node": ">=6" } }, - "node_modules/deep-equal": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/deep-equal/-/deep-equal-2.2.1.tgz", - "integrity": "sha512-lKdkdV6EOGoVn65XaOsPdH4rMxTZOnmFyuIkMjM1i5HHCbfjC97dawgTAy0deYNfuqUqW+Q5VrVaQYtUpSd6yQ==", - "dependencies": { - "array-buffer-byte-length": "^1.0.0", - "call-bind": "^1.0.2", - "es-get-iterator": "^1.1.3", - "get-intrinsic": "^1.2.0", - "is-arguments": "^1.1.1", - "is-array-buffer": "^3.0.2", - "is-date-object": "^1.0.5", - "is-regex": "^1.1.4", - "is-shared-array-buffer": "^1.0.2", - "isarray": "^2.0.5", - "object-is": "^1.1.5", - "object-keys": "^1.1.1", - "object.assign": "^4.1.4", - "regexp.prototype.flags": "^1.5.0", - "side-channel": "^1.0.4", - "which-boxed-primitive": "^1.0.2", - "which-collection": "^1.0.1", - "which-typed-array": "^1.1.9" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/deep-is": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", - "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==" + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true }, "node_modules/deepmerge": { "version": "4.3.1", @@ -3779,13 +5117,14 @@ "node": ">=0.10.0" } }, - "node_modules/define-properties": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.0.tgz", - "integrity": "sha512-xvqAVKGfT1+UAvPwKTVw/njhdQ8ZhXK4lI0bCIuCMrp2up9nPnaDftrLtmpTazqd1o+UY4zgzU+avtMbDP+ldA==", + "node_modules/define-data-property": { + "version": "1.1.4", + 
"resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", "dependencies": { - "has-property-descriptors": "^1.0.0", - "object-keys": "^1.1.1" + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" }, "engines": { "node": ">= 0.4" @@ -3794,14 +5133,28 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/defined": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/defined/-/defined-1.0.1.tgz", - "integrity": "sha512-hsBd2qSVCRE+5PmNdHt1uzyrFu5d3RwmFDKzyNZMFq/EwDNJF7Ee5+D5oEKF0hU6LhtoUF1macFvOe4AskQC1Q==", + "node_modules/define-properties": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", + "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", + "license": "MIT", + "dependencies": { + "define-data-property": "^1.0.1", + "has-property-descriptors": "^1.0.0", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/defu": { + "version": "6.1.4", + "resolved": "https://registry.npmjs.org/defu/-/defu-6.1.4.tgz", + "integrity": "sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg==" + }, "node_modules/delayed-stream": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", @@ -3827,6 +5180,11 @@ "minimalistic-assert": "^1.0.0" } }, + "node_modules/destr": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/destr/-/destr-2.0.3.tgz", + "integrity": "sha512-2N3BOUU4gYMpTP24s5rF5iP7BDr7uNTCs4ozw3kf/eKfvWSIu93GEBi5m427YoyJoeOzQ5smuu4nNAPGb8idSQ==" + }, "node_modules/destroy": { "version": "1.2.0", "resolved": 
"https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", @@ -3836,31 +5194,23 @@ "npm": "1.2.8000 || >= 1.4.16" } }, - "node_modules/detect-node": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/detect-node/-/detect-node-2.1.0.tgz", - "integrity": "sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g==" - }, - "node_modules/detective": { - "version": "4.7.1", - "resolved": "https://registry.npmjs.org/detective/-/detective-4.7.1.tgz", - "integrity": "sha512-H6PmeeUcZloWtdt4DAkFyzFL94arpHr3NOwwmVILFiy+9Qd4JTxxXrzfyGk/lmct2qVGBwTSwSXagqu2BxmWig==", - "dependencies": { - "acorn": "^5.2.1", - "defined": "^1.0.0" - } - }, - "node_modules/detective/node_modules/acorn": { - "version": "5.7.4", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-5.7.4.tgz", - "integrity": "sha512-1D++VG7BhrtvQpNbBzovKNc1FLGGEE/oGe7b9xJm/RFHMBeUaUGpluV9RLjZa47YFdPcDAenEYuq9pQPcMdLJg==", + "node_modules/detect-libc": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz", + "integrity": "sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==", + "optional": true, "bin": { - "acorn": "bin/acorn" + "detect-libc": "bin/detect-libc.js" }, "engines": { - "node": ">=0.4.0" + "node": ">=0.10" } }, + "node_modules/detect-node": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/detect-node/-/detect-node-2.1.0.tgz", + "integrity": "sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g==" + }, "node_modules/diffie-hellman": { "version": "5.0.3", "resolved": "https://registry.npmjs.org/diffie-hellman/-/diffie-hellman-5.0.3.tgz", @@ -3876,51 +5226,61 @@ "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==" }, - "node_modules/dir-glob": { - "version": "3.0.1", - "resolved": 
"https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", - "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", - "dependencies": { - "path-type": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/doctrine": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", - "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", + "dev": true, + "license": "Apache-2.0", "dependencies": { "esutils": "^2.0.2" }, "engines": { - "node": ">=6.0.0" + "node": ">=0.10.0" } }, - "node_modules/domexception": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/domexception/-/domexception-4.0.0.tgz", - "integrity": "sha512-A2is4PLG+eeSfoTMA95/s4pvAoSo2mKtiM5jlHkAVewmiO8ISFTFKZjH7UAM1Atli/OT/7JHOrJRJiMKUZKYBw==", + "node_modules/domain-browser": { + "version": "4.23.0", + "resolved": "https://registry.npmjs.org/domain-browser/-/domain-browser-4.23.0.tgz", + "integrity": "sha512-ArzcM/II1wCCujdCNyQjXrAFwS4mrLh4C7DZWlaI8mdh7h3BfKdNd3bKXITfl2PT9FtfQqaGvhi1vPRQPimjGA==", "dev": true, - "dependencies": { - "webidl-conversions": "^7.0.0" - }, "engines": { - "node": ">=12" + "node": ">=10" + }, + "funding": { + "url": "https://bevry.me/fund" } }, "node_modules/dotenv": { - "version": "16.3.2", - "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.3.2.tgz", - "integrity": "sha512-HTlk5nmhkm8F6JcdXvHIzaorzCoziNQT9mGxLPVXW8wJF1TiGSL60ZGB4gHWabHOaMmWmhvk2/lPHfnBiT78AQ==", + "version": "16.6.1", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.6.1.tgz", + "integrity": "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==", "engines": { "node": 
">=12" }, "funding": { - "url": "https://github.com/motdotla/dotenv?sponsor=1" + "url": "https://dotenvx.com" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" } }, + "node_modules/eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", + "dev": true + }, "node_modules/ecc-jsbn": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", @@ -3987,16 +5347,15 @@ "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" }, "node_modules/electron-to-chromium": { - "version": "1.4.403", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.403.tgz", - "integrity": "sha512-evCMqXJWmbQHdlh307peXNguqVIMmcLGrQwXiR+Qc98js8jPDeT9rse1+EF2YRjWgueuzj1r4WWLAe4/U+xjMg==", - "dev": true, - "peer": true + "version": "1.5.55", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.55.tgz", + "integrity": "sha512-6maZ2ASDOTBtjt9FhqYPRnbvKU5tjG0IN9SztUOWYw2AzNDNpKJYLJmlK0/En4Hs/aiWnB+JZ+gW19PIGszgKg==" }, "node_modules/elliptic": { - "version": "6.5.4", - "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.4.tgz", - "integrity": "sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==", + "version": "6.6.1", + "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.6.1.tgz", + "integrity": 
"sha512-RaddvvMatK2LJHqFJ+YA4WysVN5Ita9E35botqIYspQ4TkRAlCicdzKOjlyv/1Za5RyTNn7di//eEV0uTAfe3g==", + "license": "MIT", "dependencies": { "bn.js": "^4.11.9", "brorand": "^1.1.0", @@ -4015,7 +5374,8 @@ "node_modules/emoji-regex": { "version": "9.2.2", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true }, "node_modules/emojis-list": { "version": "3.0.0", @@ -4027,17 +5387,17 @@ } }, "node_modules/encodeurl": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", "engines": { "node": ">= 0.8" } }, "node_modules/enhanced-resolve": { - "version": "5.14.0", - "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.14.0.tgz", - "integrity": "sha512-+DCows0XNwLDcUhbFJPdlQEVnT2zXlCv7hPxemTz86/O+B/hCQ+mb7ydkPKiflpVraqLPCAfu7lDy+hBXueojw==", + "version": "5.17.1", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.17.1.tgz", + "integrity": "sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg==", "dev": true, "peer": true, "dependencies": { @@ -4049,9 +5409,10 @@ } }, "node_modules/entities": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/entities/-/entities-3.0.1.tgz", - "integrity": "sha512-WiyBqoomrwMdFG1e0kqvASYfnlb0lp8M5o5Fw2OFq1hNZxxcNk8Ik0Xm7LxzBhuidnZB/UtBqVCgUz3kBOP51Q==", + "version": "4.5.0", + "resolved": 
"https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "license": "BSD-2-Clause", "engines": { "node": ">=0.12" }, @@ -4059,57 +5420,67 @@ "url": "https://github.com/fb55/entities?sponsor=1" } }, - "node_modules/envify": { - "version": "3.4.1", - "resolved": "https://registry.npmjs.org/envify/-/envify-3.4.1.tgz", - "integrity": "sha512-XLiBFsLtNF0MOZl+vWU59yPb3C2JtrQY2CNJn22KH75zPlHWY5ChcAQuf4knJeWT/lLkrx3sqvhP/J349bt4Bw==", - "dependencies": { - "jstransform": "^11.0.3", - "through": "~2.3.4" - }, - "bin": { - "envify": "bin/envify" - } - }, "node_modules/es-abstract": { - "version": "1.21.2", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.21.2.tgz", - "integrity": "sha512-y/B5POM2iBnIxCiernH1G7rC9qQoM77lLIMQLuob0zhp8C56Po81+2Nj0WFKnd0pNReDTnkYryc+zhOzpEIROg==", - "dependencies": { - "array-buffer-byte-length": "^1.0.0", - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", - "es-set-tostringtag": "^2.0.1", - "es-to-primitive": "^1.2.1", - "function.prototype.name": "^1.1.5", - "get-intrinsic": "^1.2.0", - "get-symbol-description": "^1.0.0", - "globalthis": "^1.0.3", - "gopd": "^1.0.1", - "has": "^1.0.3", - "has-property-descriptors": "^1.0.0", - "has-proto": "^1.0.1", - "has-symbols": "^1.0.3", - "internal-slot": "^1.0.5", - "is-array-buffer": "^3.0.2", + "version": "1.24.0", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.24.0.tgz", + "integrity": "sha512-WSzPgsdLtTcQwm4CROfS5ju2Wa1QQcVeT37jFjYzdFz1r9ahadC8B8/a4qxJxM+09F18iumCdRmlr96ZYkQvEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "array-buffer-byte-length": "^1.0.2", + "arraybuffer.prototype.slice": "^1.0.4", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "data-view-buffer": "^1.0.2", + "data-view-byte-length": "^1.0.2", + "data-view-byte-offset": "^1.0.1", + 
"es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "es-set-tostringtag": "^2.1.0", + "es-to-primitive": "^1.3.0", + "function.prototype.name": "^1.1.8", + "get-intrinsic": "^1.3.0", + "get-proto": "^1.0.1", + "get-symbol-description": "^1.1.0", + "globalthis": "^1.0.4", + "gopd": "^1.2.0", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "internal-slot": "^1.1.0", + "is-array-buffer": "^3.0.5", "is-callable": "^1.2.7", - "is-negative-zero": "^2.0.2", - "is-regex": "^1.1.4", - "is-shared-array-buffer": "^1.0.2", - "is-string": "^1.0.7", - "is-typed-array": "^1.1.10", - "is-weakref": "^1.0.2", - "object-inspect": "^1.12.3", + "is-data-view": "^1.0.2", + "is-negative-zero": "^2.0.3", + "is-regex": "^1.2.1", + "is-set": "^2.0.3", + "is-shared-array-buffer": "^1.0.4", + "is-string": "^1.1.1", + "is-typed-array": "^1.1.15", + "is-weakref": "^1.1.1", + "math-intrinsics": "^1.1.0", + "object-inspect": "^1.13.4", "object-keys": "^1.1.1", - "object.assign": "^4.1.4", - "regexp.prototype.flags": "^1.4.3", - "safe-regex-test": "^1.0.0", - "string.prototype.trim": "^1.2.7", - "string.prototype.trimend": "^1.0.6", - "string.prototype.trimstart": "^1.0.6", - "typed-array-length": "^1.0.4", - "unbox-primitive": "^1.0.2", - "which-typed-array": "^1.1.9" + "object.assign": "^4.1.7", + "own-keys": "^1.0.1", + "regexp.prototype.flags": "^1.5.4", + "safe-array-concat": "^1.1.3", + "safe-push-apply": "^1.0.0", + "safe-regex-test": "^1.1.0", + "set-proto": "^1.0.0", + "stop-iteration-iterator": "^1.1.0", + "string.prototype.trim": "^1.2.10", + "string.prototype.trimend": "^1.0.9", + "string.prototype.trimstart": "^1.0.8", + "typed-array-buffer": "^1.0.3", + "typed-array-byte-length": "^1.0.3", + "typed-array-byte-offset": "^1.0.4", + "typed-array-length": "^1.0.7", + "unbox-primitive": "^1.1.0", + "which-typed-array": "^1.1.19" }, "engines": { "node": ">= 0.4" @@ -4118,61 +5489,76 @@ 
"url": "https://github.com/sponsors/ljharb" } }, - "node_modules/es-get-iterator": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/es-get-iterator/-/es-get-iterator-1.1.3.tgz", - "integrity": "sha512-sPZmqHBe6JIiTfN5q2pEi//TwxmAFHwj/XEuYjTuse78i8KxaqMTTzxPoFKuzRpDpTJ+0NAbpfenkmH2rePtuw==", - "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.3", - "has-symbols": "^1.0.3", - "is-arguments": "^1.1.1", - "is-map": "^2.0.2", - "is-set": "^2.0.2", - "is-string": "^1.0.7", - "isarray": "^2.0.5", - "stop-iteration-iterator": "^1.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "engines": { + "node": ">= 0.4" } }, "node_modules/es-module-lexer": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.2.1.tgz", - "integrity": "sha512-9978wrXM50Y4rTMmW5kXIC09ZdXQZqkE4mxhwkd8VbzsGkXGPgV4zWuqQJgCEzYngdo2dYDa0l8xhX4fkSwJSg==", - "dev": true, - "peer": true + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.5.4.tgz", + "integrity": "sha512-MVNK56NiMrOwitFB7cqDwq0CQutbw+0BvLshJSse0MUNU+y1FC3bUS/AQg7oUng+/wKrrki7JfmwtVHkVfPLlw==", + "dev": true + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": 
"sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } }, "node_modules/es-set-tostringtag": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.1.tgz", - "integrity": "sha512-g3OMbtlwY3QewlqAiMLI47KywjWZoEytKr8pf6iTC8uJq5bIAH52Z9pnQ8pVL6whrCto53JZDuUIsifGeLorTg==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", "dependencies": { - "get-intrinsic": "^1.1.3", - "has": "^1.0.3", - "has-tostringtag": "^1.0.0" + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" }, "engines": { "node": ">= 0.4" } }, "node_modules/es-shim-unscopables": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz", - "integrity": "sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.1.0.tgz", + "integrity": "sha512-d9T8ucsEhh8Bi1woXCf+TIKDIROLG5WCkxg8geBCbvk22kzwC5G2OnXVMO6FUsvQlgUUXQ2itephWDLqDzbeCw==", + "dev": true, + "license": "MIT", "dependencies": { - "has": "^1.0.3" + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" } }, "node_modules/es-to-primitive": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", - "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.3.0.tgz", + "integrity": 
"sha512-w+5mJ3GuFL+NjVtJlvydShqE1eN3h3PbI7/5LAsYJP/2qtuMXjfL2LpHSRqo4b4eSF5K/DH1JXKUAHSB2UW50g==", + "dev": true, + "license": "MIT", "dependencies": { - "is-callable": "^1.1.4", - "is-date-object": "^1.0.1", - "is-symbol": "^1.0.2" + "is-callable": "^1.2.7", + "is-date-object": "^1.0.5", + "is-symbol": "^1.0.4" }, "engines": { "node": ">= 0.4" @@ -4182,47 +5568,48 @@ } }, "node_modules/esbuild": { - "version": "0.19.11", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.19.11.tgz", - "integrity": "sha512-HJ96Hev2hX/6i5cDVwcqiJBBtuo9+FeIJOtZ9W1kA5M6AMJRHUZlpYZ1/SbEwtO0ioNAW8rUooVpC/WehY2SfA==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.24.0.tgz", + "integrity": "sha512-FuLPevChGDshgSicjisSooU0cemp/sGXR841D5LHMB7mTVOmsEHcAxaH3irL53+8YDIeVNQEySh4DaYU/iuPqQ==", "hasInstallScript": true, + "peer": true, "bin": { "esbuild": "bin/esbuild" }, "engines": { - "node": ">=12" + "node": ">=18" }, "optionalDependencies": { - "@esbuild/aix-ppc64": "0.19.11", - "@esbuild/android-arm": "0.19.11", - "@esbuild/android-arm64": "0.19.11", - "@esbuild/android-x64": "0.19.11", - "@esbuild/darwin-arm64": "0.19.11", - "@esbuild/darwin-x64": "0.19.11", - "@esbuild/freebsd-arm64": "0.19.11", - "@esbuild/freebsd-x64": "0.19.11", - "@esbuild/linux-arm": "0.19.11", - "@esbuild/linux-arm64": "0.19.11", - "@esbuild/linux-ia32": "0.19.11", - "@esbuild/linux-loong64": "0.19.11", - "@esbuild/linux-mips64el": "0.19.11", - "@esbuild/linux-ppc64": "0.19.11", - "@esbuild/linux-riscv64": "0.19.11", - "@esbuild/linux-s390x": "0.19.11", - "@esbuild/linux-x64": "0.19.11", - "@esbuild/netbsd-x64": "0.19.11", - "@esbuild/openbsd-x64": "0.19.11", - "@esbuild/sunos-x64": "0.19.11", - "@esbuild/win32-arm64": "0.19.11", - "@esbuild/win32-ia32": "0.19.11", - "@esbuild/win32-x64": "0.19.11" + "@esbuild/aix-ppc64": "0.24.0", + "@esbuild/android-arm": "0.24.0", + "@esbuild/android-arm64": "0.24.0", + "@esbuild/android-x64": "0.24.0", + "@esbuild/darwin-arm64": 
"0.24.0", + "@esbuild/darwin-x64": "0.24.0", + "@esbuild/freebsd-arm64": "0.24.0", + "@esbuild/freebsd-x64": "0.24.0", + "@esbuild/linux-arm": "0.24.0", + "@esbuild/linux-arm64": "0.24.0", + "@esbuild/linux-ia32": "0.24.0", + "@esbuild/linux-loong64": "0.24.0", + "@esbuild/linux-mips64el": "0.24.0", + "@esbuild/linux-ppc64": "0.24.0", + "@esbuild/linux-riscv64": "0.24.0", + "@esbuild/linux-s390x": "0.24.0", + "@esbuild/linux-x64": "0.24.0", + "@esbuild/netbsd-x64": "0.24.0", + "@esbuild/openbsd-arm64": "0.24.0", + "@esbuild/openbsd-x64": "0.24.0", + "@esbuild/sunos-x64": "0.24.0", + "@esbuild/win32-arm64": "0.24.0", + "@esbuild/win32-ia32": "0.24.0", + "@esbuild/win32-x64": "0.24.0" } }, "node_modules/escalade": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", - "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", - "dev": true, + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", "engines": { "node": ">=6" } @@ -4236,6 +5623,7 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, "engines": { "node": ">=10" }, @@ -4243,194 +5631,94 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/escodegen": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-2.0.0.tgz", - "integrity": "sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw==", - "dev": true, - "dependencies": { - "esprima": "^4.0.1", - "estraverse": "^5.2.0", - "esutils": "^2.0.2", - "optionator": "^0.8.1" - }, - "bin": { - "escodegen": "bin/escodegen.js", - 
"esgenerate": "bin/esgenerate.js" - }, - "engines": { - "node": ">=6.0" - }, - "optionalDependencies": { - "source-map": "~0.6.1" - } - }, - "node_modules/escodegen/node_modules/levn": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", - "integrity": "sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA==", - "dev": true, - "dependencies": { - "prelude-ls": "~1.1.2", - "type-check": "~0.3.2" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/escodegen/node_modules/optionator": { - "version": "0.8.3", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", - "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", - "dev": true, - "dependencies": { - "deep-is": "~0.1.3", - "fast-levenshtein": "~2.0.6", - "levn": "~0.3.0", - "prelude-ls": "~1.1.2", - "type-check": "~0.3.2", - "word-wrap": "~1.2.3" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/escodegen/node_modules/prelude-ls": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", - "integrity": "sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w==", - "dev": true, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/escodegen/node_modules/type-check": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", - "integrity": "sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg==", - "dev": true, - "dependencies": { - "prelude-ls": "~1.1.2" - }, - "engines": { - "node": ">= 0.8.0" - } - }, "node_modules/eslint": { - "version": "8.41.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.41.0.tgz", - "integrity": "sha512-WQDQpzGBOP5IrXPo4Hc0814r4/v2rrIsB0rhT7jtunIalgg6gYXWhRMOejVO8yH21T/FGaxjmFjBMNqcIlmH1Q==", - "dependencies": { - 
"@eslint-community/eslint-utils": "^4.2.0", - "@eslint-community/regexpp": "^4.4.0", - "@eslint/eslintrc": "^2.0.3", - "@eslint/js": "8.41.0", - "@humanwhocodes/config-array": "^0.11.8", + "version": "9.39.2", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.39.2.tgz", + "integrity": "sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.8.0", + "@eslint-community/regexpp": "^4.12.1", + "@eslint/config-array": "^0.21.1", + "@eslint/config-helpers": "^0.4.2", + "@eslint/core": "^0.17.0", + "@eslint/eslintrc": "^3.3.1", + "@eslint/js": "9.39.2", + "@eslint/plugin-kit": "^0.4.1", + "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", - "@nodelib/fs.walk": "^1.2.8", - "ajv": "^6.10.0", + "@humanwhocodes/retry": "^0.4.2", + "@types/estree": "^1.0.6", + "ajv": "^6.12.4", "chalk": "^4.0.0", - "cross-spawn": "^7.0.2", + "cross-spawn": "^7.0.6", "debug": "^4.3.2", - "doctrine": "^3.0.0", "escape-string-regexp": "^4.0.0", - "eslint-scope": "^7.2.0", - "eslint-visitor-keys": "^3.4.1", - "espree": "^9.5.2", - "esquery": "^1.4.2", + "eslint-scope": "^8.4.0", + "eslint-visitor-keys": "^4.2.1", + "espree": "^10.4.0", + "esquery": "^1.5.0", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", - "file-entry-cache": "^6.0.1", + "file-entry-cache": "^8.0.0", "find-up": "^5.0.0", "glob-parent": "^6.0.2", - "globals": "^13.19.0", - "graphemer": "^1.4.0", "ignore": "^5.2.0", - "import-fresh": "^3.0.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", - "is-path-inside": "^3.0.3", - "js-yaml": "^4.1.0", "json-stable-stringify-without-jsonify": "^1.0.1", - "levn": "^0.4.1", "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", - "optionator": "^0.9.1", - "strip-ansi": "^6.0.1", - "strip-json-comments": "^3.1.0", - "text-table": "^0.2.0" + "optionator": "^0.9.3" }, "bin": { "eslint": "bin/eslint.js" }, 
"engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/eslint-config-airbnb-base": { - "version": "15.0.0", - "resolved": "https://registry.npmjs.org/eslint-config-airbnb-base/-/eslint-config-airbnb-base-15.0.0.tgz", - "integrity": "sha512-xaX3z4ZZIcFLvh2oUNvcX5oEofXda7giYmuplVxoOg5A7EXJMrUyqRgR+mhDhPK8LZ4PttFOBvCYDbX3sUoUig==", - "dependencies": { - "confusing-browser-globals": "^1.0.10", - "object.assign": "^4.1.2", - "object.entries": "^1.1.5", - "semver": "^6.3.0" - }, - "engines": { - "node": "^10.12.0 || >=12.0.0" + "url": "https://eslint.org/donate" }, "peerDependencies": { - "eslint": "^7.32.0 || ^8.2.0", - "eslint-plugin-import": "^2.25.2" - } - }, - "node_modules/eslint-config-airbnb-base/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/eslint-import-resolver-custom-alias": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-custom-alias/-/eslint-import-resolver-custom-alias-1.3.2.tgz", - "integrity": "sha512-wBPcZA2k6/IXaT8FsLMyiyVSG6WVEuaYIAbeKLXeGwr523BmeB9lKAAoLJWSqp3txsnU4gpkgD2x1q6K8k0uDQ==", - "dependencies": { - "glob-parent": "^6.0.2", - "resolve": "^1.22.2" + "jiti": "*" }, - "peerDependencies": { - "eslint-plugin-import": ">=2.2.0" + "peerDependenciesMeta": { + "jiti": { + "optional": true + } } }, "node_modules/eslint-import-resolver-node": { - "version": "0.3.7", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.7.tgz", - "integrity": "sha512-gozW2blMLJCeFpBwugLTGyvVjNoeo1knonXAcatC6bjPBZitotxdWf7Gimr25N4c0AAOo4eOUfaG82IJPDpqCA==", + "version": "0.3.9", + "resolved": 
"https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.9.tgz", + "integrity": "sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g==", + "dev": true, + "license": "MIT", "dependencies": { "debug": "^3.2.7", - "is-core-module": "^2.11.0", - "resolve": "^1.22.1" + "is-core-module": "^2.13.0", + "resolve": "^1.22.4" } }, "node_modules/eslint-import-resolver-node/node_modules/debug": { "version": "3.2.7", "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "license": "MIT", "dependencies": { "ms": "^2.1.1" } }, "node_modules/eslint-module-utils": { - "version": "2.8.0", - "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.8.0.tgz", - "integrity": "sha512-aWajIYfsqCKRDgUfjEXNN/JlrzauMuSEy5sbd7WXbtW3EH6A6MpwEh42c7qD+MqQo9QMJ6fWLAeIJynx0g6OAw==", + "version": "2.12.1", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.12.1.tgz", + "integrity": "sha512-L8jSWTze7K2mTg0vos/RuLRS5soomksDPoJLXIslC7c8Wmut3bx7CPpJijDcBZtxQ5lrbUdM+s0OlNbz0DCDNw==", + "dev": true, + "license": "MIT", "dependencies": { "debug": "^3.2.7" }, @@ -4447,207 +5735,212 @@ "version": "3.2.7", "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "license": "MIT", "dependencies": { "ms": "^2.1.1" } }, "node_modules/eslint-plugin-import": { - "version": "2.27.5", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.27.5.tgz", - "integrity": "sha512-LmEt3GVofgiGuiE+ORpnvP+kAm3h6MLZJ4Q5HCyHADofsb4VzXFsRiWj3c0OFiV+3DWFh0qg3v9gcPlfc3zRow==", + "version": "2.32.0", + "resolved": 
"https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.32.0.tgz", + "integrity": "sha512-whOE1HFo/qJDyX4SnXzP4N6zOWn79WhnCUY/iDR0mPfQZO8wcYE4JClzI2oZrhBnnMUCBCHZhO6VQyoBU95mZA==", + "dev": true, + "license": "MIT", "dependencies": { - "array-includes": "^3.1.6", - "array.prototype.flat": "^1.3.1", - "array.prototype.flatmap": "^1.3.1", + "@rtsao/scc": "^1.1.0", + "array-includes": "^3.1.9", + "array.prototype.findlastindex": "^1.2.6", + "array.prototype.flat": "^1.3.3", + "array.prototype.flatmap": "^1.3.3", "debug": "^3.2.7", "doctrine": "^2.1.0", - "eslint-import-resolver-node": "^0.3.7", - "eslint-module-utils": "^2.7.4", - "has": "^1.0.3", - "is-core-module": "^2.11.0", + "eslint-import-resolver-node": "^0.3.9", + "eslint-module-utils": "^2.12.1", + "hasown": "^2.0.2", + "is-core-module": "^2.16.1", "is-glob": "^4.0.3", "minimatch": "^3.1.2", - "object.values": "^1.1.6", - "resolve": "^1.22.1", - "semver": "^6.3.0", - "tsconfig-paths": "^3.14.1" + "object.fromentries": "^2.0.8", + "object.groupby": "^1.0.3", + "object.values": "^1.2.1", + "semver": "^6.3.1", + "string.prototype.trimend": "^1.0.9", + "tsconfig-paths": "^3.15.0" }, "engines": { "node": ">=4" }, "peerDependencies": { - "eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8" + "eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8 || ^9" } }, "node_modules/eslint-plugin-import/node_modules/debug": { "version": "3.2.7", "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "license": "MIT", "dependencies": { "ms": "^2.1.1" } }, - "node_modules/eslint-plugin-import/node_modules/doctrine": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", - "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", - "dependencies": { - "esutils": "^2.0.2" - }, - 
"engines": { - "node": ">=0.10.0" - } - }, "node_modules/eslint-plugin-import/node_modules/semver": { "version": "6.3.1", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } }, - "node_modules/eslint-plugin-jsx-a11y": { - "version": "6.7.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.7.1.tgz", - "integrity": "sha512-63Bog4iIethyo8smBklORknVjB0T2dwB8Mr/hIC+fBS0uyHdYYpzM/Ed+YC8VxTjlXHEWFOdmgwcDn1U2L9VCA==", + "node_modules/eslint-plugin-jest": { + "version": "29.12.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-jest/-/eslint-plugin-jest-29.12.1.tgz", + "integrity": "sha512-Rxo7r4jSANMBkXLICJKS0gjacgyopfNAsoS0e3R9AHnjoKuQOaaPfmsDJPi8UWwygI099OV/K/JhpYRVkxD4AA==", + "dev": true, + "license": "MIT", "dependencies": { - "@babel/runtime": "^7.20.7", - "aria-query": "^5.1.3", - "array-includes": "^3.1.6", - "array.prototype.flatmap": "^1.3.1", - "ast-types-flow": "^0.0.7", - "axe-core": "^4.6.2", - "axobject-query": "^3.1.1", - "damerau-levenshtein": "^1.0.8", - "emoji-regex": "^9.2.2", - "has": "^1.0.3", - "jsx-ast-utils": "^3.3.3", - "language-tags": "=1.0.5", - "minimatch": "^3.1.2", - "object.entries": "^1.1.6", - "object.fromentries": "^2.0.6", - "semver": "^6.3.0" + "@typescript-eslint/utils": "^8.0.0" }, "engines": { - "node": ">=4.0" + "node": "^20.12.0 || ^22.0.0 || >=24.0.0" }, "peerDependencies": { - "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8" - } - }, - "node_modules/eslint-plugin-jsx-a11y/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "bin": { - "semver": "bin/semver.js" + "@typescript-eslint/eslint-plugin": "^8.0.0", + 
"eslint": "^8.57.0 || ^9.0.0", + "jest": "*" + }, + "peerDependenciesMeta": { + "@typescript-eslint/eslint-plugin": { + "optional": true + }, + "jest": { + "optional": true + } } }, - "node_modules/eslint-plugin-react": { - "version": "7.32.2", - "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.32.2.tgz", - "integrity": "sha512-t2fBMa+XzonrrNkyVirzKlvn5RXzzPwRHtMvLAtVZrt8oxgnTQaYbU6SXTOO1mwQgp1y5+toMSKInnzGr0Knqg==", + "node_modules/eslint-plugin-prettier": { + "version": "5.5.5", + "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-5.5.5.tgz", + "integrity": "sha512-hscXkbqUZ2sPithAuLm5MXL+Wph+U7wHngPBv9OMWwlP8iaflyxpjTYZkmdgB4/vPIhemRlBEoLrH7UC1n7aUw==", + "dev": true, + "license": "MIT", "dependencies": { - "array-includes": "^3.1.6", - "array.prototype.flatmap": "^1.3.1", - "array.prototype.tosorted": "^1.1.1", - "doctrine": "^2.1.0", - "estraverse": "^5.3.0", - "jsx-ast-utils": "^2.4.1 || ^3.0.0", - "minimatch": "^3.1.2", - "object.entries": "^1.1.6", - "object.fromentries": "^2.0.6", - "object.hasown": "^1.1.2", - "object.values": "^1.1.6", - "prop-types": "^15.8.1", - "resolve": "^2.0.0-next.4", - "semver": "^6.3.0", - "string.prototype.matchall": "^4.0.8" + "prettier-linter-helpers": "^1.0.1", + "synckit": "^0.11.12" }, "engines": { - "node": ">=4" + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint-plugin-prettier" }, "peerDependencies": { - "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8" + "@types/eslint": ">=8.0.0", + "eslint": ">=8.0.0", + "eslint-config-prettier": ">= 7.0.0 <10.0.0 || >=10.1.0", + "prettier": ">=3.0.0" + }, + "peerDependenciesMeta": { + "@types/eslint": { + "optional": true + }, + "eslint-config-prettier": { + "optional": true + } } }, - "node_modules/eslint-plugin-react/node_modules/doctrine": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", - "integrity": 
"sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", + "node_modules/eslint-plugin-vue": { + "version": "9.33.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-vue/-/eslint-plugin-vue-9.33.0.tgz", + "integrity": "sha512-174lJKuNsuDIlLpjeXc5E2Tss8P44uIimAfGD0b90k0NoirJqpG7stLuU9Vp/9ioTOrQdWVREc4mRd1BD+CvGw==", + "dev": true, + "license": "MIT", "dependencies": { - "esutils": "^2.0.2" + "@eslint-community/eslint-utils": "^4.4.0", + "globals": "^13.24.0", + "natural-compare": "^1.4.0", + "nth-check": "^2.1.1", + "postcss-selector-parser": "^6.0.15", + "semver": "^7.6.3", + "vue-eslint-parser": "^9.4.3", + "xml-name-validator": "^4.0.0" }, "engines": { - "node": ">=0.10.0" + "node": "^14.17.0 || >=16.0.0" + }, + "peerDependencies": { + "eslint": "^6.2.0 || ^7.0.0 || ^8.0.0 || ^9.0.0" } }, - "node_modules/eslint-plugin-react/node_modules/resolve": { - "version": "2.0.0-next.4", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.4.tgz", - "integrity": "sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ==", + "node_modules/eslint-plugin-vue/node_modules/eslint-scope": { + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", + "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", + "dev": true, + "license": "BSD-2-Clause", "dependencies": { - "is-core-module": "^2.9.0", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" }, - "bin": { - "resolve": "bin/resolve" + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" }, "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/eslint-plugin-react/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": 
"sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "bin": { - "semver": "bin/semver.js" + "url": "https://opencollective.com/eslint" } }, - "node_modules/eslint-plugin-vue": { - "version": "9.20.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-vue/-/eslint-plugin-vue-9.20.1.tgz", - "integrity": "sha512-GyCs8K3lkEvoyC1VV97GJhP1SvqsKCiWGHnbn0gVUYiUhaH2+nB+Dv1uekv1THFMPbBfYxukrzQdltw950k+LQ==", + "node_modules/eslint-plugin-vue/node_modules/espree": { + "version": "9.6.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", + "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", + "dev": true, + "license": "BSD-2-Clause", "dependencies": { - "@eslint-community/eslint-utils": "^4.4.0", - "natural-compare": "^1.4.0", - "nth-check": "^2.1.1", - "postcss-selector-parser": "^6.0.13", - "semver": "^7.5.4", - "vue-eslint-parser": "^9.4.0", - "xml-name-validator": "^4.0.0" + "acorn": "^8.9.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.4.1" }, "engines": { - "node": "^14.17.0 || >=16.0.0" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" }, - "peerDependencies": { - "eslint": "^6.2.0 || ^7.0.0 || ^8.0.0" + "funding": { + "url": "https://opencollective.com/eslint" } }, - "node_modules/eslint-plugin-vuejs-accessibility": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-vuejs-accessibility/-/eslint-plugin-vuejs-accessibility-1.2.0.tgz", - "integrity": "sha512-wF7kT22lS2VOmIpDeI65bnFFKFgESEEpI+CWKr43mdfDRywA4sCk7cKhtZsvfbPOtKO0GDlnpFxZbOIGsFn7IQ==", + "node_modules/eslint-plugin-vue/node_modules/vue-eslint-parser": { + "version": "9.4.3", + "resolved": "https://registry.npmjs.org/vue-eslint-parser/-/vue-eslint-parser-9.4.3.tgz", + "integrity": "sha512-2rYRLWlIpaiN8xbPiDyXZXRgLGOtWxERV7ND5fFAv5qo1D2N9Fu9MNajBNc6o13lZ+24DAWCkQCvj4klgmcITg==", + "dev": true, + "license": "MIT", "dependencies": { - "aria-query": 
"^5.0.0", - "emoji-regex": "^10.0.0", - "vue-eslint-parser": "^9.0.1" + "debug": "^4.3.4", + "eslint-scope": "^7.1.1", + "eslint-visitor-keys": "^3.3.0", + "espree": "^9.3.1", + "esquery": "^1.4.0", + "lodash": "^4.17.21", + "semver": "^7.3.6" + }, + "engines": { + "node": "^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" }, "peerDependencies": { - "eslint": "^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0" + "eslint": ">=6.0.0" } }, - "node_modules/eslint-plugin-vuejs-accessibility/node_modules/emoji-regex": { - "version": "10.2.1", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.2.1.tgz", - "integrity": "sha512-97g6QgOk8zlDRdgq1WxwgTMgEWGVAQvB5Fdpgc1MkNy56la5SKP9GsMXKDOdqwn90/41a8yPwIGk1Y6WVbeMQA==" - }, "node_modules/eslint-scope": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", + "dev": true, + "peer": true, "dependencies": { "esrecurse": "^4.3.0", "estraverse": "^4.1.1" @@ -4660,14 +5953,17 @@ "version": "4.3.0", "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "dev": true, + "peer": true, "engines": { "node": ">=4.0" } }, "node_modules/eslint-visitor-keys": { - "version": "3.4.1", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.1.tgz", - "integrity": "sha512-pZnmmLwYzf+kWaM/Qgrvpen51upAktaaiI01nsJD/Yr3lMOdNtq0cxkrrg16w64VtisN6okbs7Q8AfGqj4c9fA==", + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" }, @@ -4676,31 
+5972,61 @@ } }, "node_modules/eslint/node_modules/eslint-scope": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.0.tgz", - "integrity": "sha512-DYj5deGlHBfMt15J7rdtyKNq/Nqlv5KfU4iodrQ019XESsRnwXH9KAE0y3cwtUHDo2ob7CypAnCqefh6vioWRw==", + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.4.0.tgz", + "integrity": "sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==", + "dev": true, + "license": "BSD-2-Clause", "dependencies": { "esrecurse": "^4.3.0", "estraverse": "^5.2.0" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint/node_modules/eslint-visitor-keys": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { "url": "https://opencollective.com/eslint" } }, "node_modules/espree": { - "version": "9.5.2", - "resolved": "https://registry.npmjs.org/espree/-/espree-9.5.2.tgz", - "integrity": "sha512-7OASN1Wma5fum5SrNhFMAMJxOUAbhyfQ8dQ//PJaJbNw0URTPWqIghHWt1MmAANKhHZIYOHruW4Kw4ruUWOdGw==", + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-10.4.0.tgz", + "integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==", + "dev": true, + "license": "BSD-2-Clause", "dependencies": { - "acorn": "^8.8.0", + "acorn": "^8.15.0", "acorn-jsx": "^5.3.2", - "eslint-visitor-keys": "^3.4.1" + "eslint-visitor-keys": "^4.2.1" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + 
"funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/espree/node_modules/eslint-visitor-keys": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { "url": "https://opencollective.com/eslint" @@ -4710,6 +6036,7 @@ "version": "4.0.1", "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "license": "BSD-2-Clause", "bin": { "esparse": "bin/esparse.js", "esvalidate": "bin/esvalidate.js" @@ -4718,22 +6045,12 @@ "node": ">=4" } }, - "node_modules/esprima-fb": { - "version": "15001.1.0-dev-harmony-fb", - "resolved": "https://registry.npmjs.org/esprima-fb/-/esprima-fb-15001.1.0-dev-harmony-fb.tgz", - "integrity": "sha512-59dDGQo2b3M/JfKIws0/z8dcXH2mnVHkfSPRhCYS91JNGfGNwr7GsSF6qzWZuOGvw5Ii0w9TtylrX07MGmlOoQ==", - "bin": { - "esparse": "bin/esparse.js", - "esvalidate": "bin/esvalidate.js" - }, - "engines": { - "node": ">=0.4.0" - } - }, "node_modules/esquery": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", - "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", + "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", + "dev": true, + "license": "BSD-3-Clause", "dependencies": { "estraverse": "^5.1.0" }, @@ -4745,6 +6062,7 @@ "version": "4.3.0", "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", "integrity": 
"sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, "dependencies": { "estraverse": "^5.2.0" }, @@ -4756,6 +6074,7 @@ "version": "5.3.0", "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, "engines": { "node": ">=4.0" } @@ -4769,6 +6088,8 @@ "version": "2.0.3", "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "license": "BSD-2-Clause", "engines": { "node": ">=0.10.0" } @@ -4799,45 +6120,59 @@ "safe-buffer": "^5.1.1" } }, + "node_modules/expect-type": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.1.0.tgz", + "integrity": "sha512-bFi65yM+xZgk+u/KRIpekdSYkTB5W1pEf0Lt8Q8Msh7b+eQ7LXVtIB1Bkm4fvclDEL1b2CZkMhv2mOeF8tMdkA==", + "dev": true, + "engines": { + "node": ">=12.0.0" + } + }, "node_modules/express": { - "version": "4.18.2", - "resolved": "https://registry.npmjs.org/express/-/express-4.18.2.tgz", - "integrity": "sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==", + "version": "4.22.1", + "resolved": "https://registry.npmjs.org/express/-/express-4.22.1.tgz", + "integrity": "sha512-F2X8g9P1X7uCPZMA3MVf9wcTqlyNp7IhH5qPCI0izhaOIYXaW9L535tGA3qmjRzpH+bZczqq7hVKxTR4NWnu+g==", + "license": "MIT", "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", - "body-parser": "1.20.1", - "content-disposition": "0.5.4", + "body-parser": "~1.20.3", + "content-disposition": "~0.5.4", "content-type": "~1.0.4", - "cookie": "0.5.0", - "cookie-signature": "1.0.6", + "cookie": "~0.7.1", + "cookie-signature": "~1.0.6", "debug": "2.6.9", "depd": "2.0.0", - "encodeurl": "~1.0.2", + "encodeurl": "~2.0.0", "escape-html": "~1.0.3", 
"etag": "~1.8.1", - "finalhandler": "1.2.0", - "fresh": "0.5.2", - "http-errors": "2.0.0", - "merge-descriptors": "1.0.1", + "finalhandler": "~1.3.1", + "fresh": "~0.5.2", + "http-errors": "~2.0.0", + "merge-descriptors": "1.0.3", "methods": "~1.1.2", - "on-finished": "2.4.1", + "on-finished": "~2.4.1", "parseurl": "~1.3.3", - "path-to-regexp": "0.1.7", + "path-to-regexp": "~0.1.12", "proxy-addr": "~2.0.7", - "qs": "6.11.0", + "qs": "~6.14.0", "range-parser": "~1.2.1", "safe-buffer": "5.2.1", - "send": "0.18.0", - "serve-static": "1.15.0", + "send": "~0.19.0", + "serve-static": "~1.16.2", "setprototypeof": "1.2.0", - "statuses": "2.0.1", + "statuses": "~2.0.1", "type-is": "~1.6.18", "utils-merge": "1.0.1", "vary": "~1.1.2" }, "engines": { "node": ">= 0.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/express/node_modules/debug": { @@ -4853,10 +6188,26 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, + "node_modules/express/node_modules/qs": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz", + "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==", + "license": "BSD-3-Clause", + "dependencies": { + "side-channel": "^1.1.0" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/extend-shallow": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "license": "MIT", "dependencies": { "is-extendable": "^0.1.0" }, @@ -4867,24 +6218,26 @@ "node_modules/fast-deep-equal": { "version": "3.1.3", "resolved": 
"https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true }, "node_modules/fast-diff": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.3.0.tgz", "integrity": "sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw==", - "dev": true + "dev": true, + "license": "Apache-2.0" }, "node_modules/fast-glob": { - "version": "3.2.12", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz", - "integrity": "sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==", + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", "dependencies": { "@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", "glob-parent": "^5.1.2", "merge2": "^1.3.0", - "micromatch": "^4.0.4" + "micromatch": "^4.0.8" }, "engines": { "node": ">=8.6.0" @@ -4904,12 +6257,14 @@ "node_modules/fast-json-stable-stringify": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true }, "node_modules/fast-levenshtein": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", - "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==" + "integrity": 
"sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true }, "node_modules/fastq": { "version": "1.15.0", @@ -4919,38 +6274,35 @@ "reusify": "^1.0.4" } }, - "node_modules/fbjs": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/fbjs/-/fbjs-0.6.1.tgz", - "integrity": "sha512-4KW7tT33ytfazK3Ekvesbsa4A5J79hUrdXONQGZ0wM6i3PFc70YknF9kj1eyx3mDupgJ7Z+ifFhcMJ+ps2eZIw==", - "dependencies": { - "core-js": "^1.0.0", - "loose-envify": "^1.0.0", - "promise": "^7.0.3", - "ua-parser-js": "^0.7.9", - "whatwg-fetch": "^0.9.0" - } - }, - "node_modules/fbjs/node_modules/whatwg-fetch": { - "version": "0.9.0", - "resolved": "https://registry.npmjs.org/whatwg-fetch/-/whatwg-fetch-0.9.0.tgz", - "integrity": "sha512-DIuh7/cloHxHYwS/oRXGgkALYAntijL63nsgMQsNSnBj825AysosAqA2ZbYXGRqpPRiNH7335dTqV364euRpZw==" + "node_modules/fflate": { + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/fflate/-/fflate-0.8.2.tgz", + "integrity": "sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==", + "dev": true }, "node_modules/file-entry-cache": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", - "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", + "integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==", + "dev": true, + "license": "MIT", "dependencies": { - "flat-cache": "^3.0.4" + "flat-cache": "^4.0.0" }, "engines": { - "node": "^10.12.0 || >=12.0.0" + "node": ">=16.0.0" } }, + "node_modules/file-saver": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/file-saver/-/file-saver-2.0.5.tgz", + "integrity": 
"sha512-P9bmyZ3h/PRG+Nzga+rbdI4OEpNDzAVyy74uVO9ATgzLK6VtAsYybF/+TOCvrc0MO793d6+42lLyZTw7/ArVzA==", + "license": "MIT" + }, "node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", "dependencies": { "to-regex-range": "^5.0.1" }, @@ -4959,12 +6311,12 @@ } }, "node_modules/finalhandler": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz", - "integrity": "sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==", + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", + "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==", "dependencies": { "debug": "2.6.9", - "encodeurl": "~1.0.2", + "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "on-finished": "2.4.1", "parseurl": "~1.3.3", @@ -4992,6 +6344,7 @@ "version": "5.0.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, "dependencies": { "locate-path": "^6.0.0", "path-exists": "^4.0.0" @@ -5004,26 +6357,29 @@ } }, "node_modules/flat-cache": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", - "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.1.tgz", + "integrity": 
"sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==", + "dev": true, + "license": "MIT", "dependencies": { - "flatted": "^3.1.0", - "rimraf": "^3.0.2" + "flatted": "^3.2.9", + "keyv": "^4.5.4" }, "engines": { - "node": "^10.12.0 || >=12.0.0" + "node": ">=16" } }, "node_modules/flatted": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz", - "integrity": "sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==" + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.2.tgz", + "integrity": "sha512-AiwGJM8YcNOaobumgtng+6NHuOqC3A7MixFeDafM3X9cIUM+xUXoS5Vfgf+OihAYe20fxqNM9yPBXJzRtZ/4eA==", + "dev": true }, "node_modules/follow-redirects": { - "version": "1.15.4", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.4.tgz", - "integrity": "sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==", + "version": "1.15.6", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz", + "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==", "funding": [ { "type": "individual", @@ -5040,38 +6396,35 @@ } }, "node_modules/font-logos": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/font-logos/-/font-logos-1.1.1.tgz", - "integrity": "sha512-StMCMzon9lg6apaQXKO1pozKtVciCuBUw9iAr6snnBFPHcPtnnJe8W113D50aAdKFY01l0Hkm3sAW2KdSppmGA==" + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/font-logos/-/font-logos-1.3.0.tgz", + "integrity": "sha512-YGqFNu8+0bWFTU7bh3aveSl4CSsUEOEPEkRW4tP1EnmLHzAunpeHagD3ICVttn7/oPHRWO/UStMkS/tYTZTt5g==" }, "node_modules/for-each": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", - "integrity": 
"sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", - "dependencies": { - "is-callable": "^1.1.3" - } - }, - "node_modules/foreground-child": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz", - "integrity": "sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==", - "dev": true, + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.5.tgz", + "integrity": "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==", + "license": "MIT", "dependencies": { - "cross-spawn": "^7.0.0", - "signal-exit": "^3.0.2" + "is-callable": "^1.2.7" }, "engines": { - "node": ">=8.0.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/form-data": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", - "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", + "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", + "license": "MIT", "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", "mime-types": "^2.1.12" }, "engines": { @@ -5094,10 +6447,33 @@ "node": ">= 0.6" } }, + "node_modules/fs-minipass": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", + "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/fs-minipass/node_modules/minipass": { + "version": "3.3.6", + "resolved": 
"https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true }, "node_modules/fsevents": { "version": "2.3.3", @@ -5113,19 +6489,26 @@ } }, "node_modules/function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } }, "node_modules/function.prototype.name": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.5.tgz", - "integrity": "sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==", + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.8.tgz", + "integrity": "sha512-e5iwyodOHhbMr/yNrc7fDYG4qlbIvI5gajyzPnb5TCwyhjApznQh1BMFou9b30SevY43gCJKXycoCBjMbsuW0Q==", + "dev": true, + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.19.0", - "functions-have-names": "^1.2.2" + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + 
"define-properties": "^1.2.1", + "functions-have-names": "^1.2.3", + "hasown": "^2.0.2", + "is-callable": "^1.2.7" }, "engines": { "node": ">= 0.4" @@ -5138,57 +6521,65 @@ "version": "1.2.3", "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", + "dev": true, + "license": "MIT", "funding": { "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/get-caller-file": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", - "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", - "dev": true, - "engines": { - "node": "6.* || 8.* || >= 10.*" - } - }, - "node_modules/get-func-name": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz", - "integrity": "sha512-Hm0ixYtaSZ/V7C8FJrtZIuBBI+iSgL+1Aq82zSu8VQNB4S3Gk8e7Qs3VwBDJAhmRZcFqkl3tQu36g/Foh5I5ig==", - "dev": true, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", "engines": { - "node": "*" + "node": ">=6.9.0" } }, "node_modules/get-intrinsic": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.1.tgz", - "integrity": "sha512-2DcsyfABl+gVHEfCOaTrWgyt+tb6MSEGmKq+kI5HwLbIYgjgmMcV8KQ41uaKz1xxUcn9tJtgFbQUEVcEbd0FYw==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", "dependencies": { - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-proto": "^1.0.1", - "has-symbols": "^1.0.3" + 
"call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/get-stdin": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-4.0.1.tgz", - "integrity": "sha512-F5aQMywwJ2n85s4hJPTT9RPxGmubonuB10MNYo17/xph174n2MIR33HRguhzVag10O/npM7SPk73LMZNP+FaWw==", + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, "engines": { - "node": ">=0.10.0" + "node": ">= 0.4" } }, "node_modules/get-symbol-description": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz", - "integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.1.0.tgz", + "integrity": "sha512-w9UMqWwJxHNOvoNzSJ2oPF5wvYcvP7jUvYzhp67yEhTi17ZDBBC1z9pTdGuzjD+EFIqLSYRweZjqfiPzQ06Ebg==", + "dev": true, + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.1" + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6" }, "engines": { "node": ">= 0.4" @@ -5205,30 +6596,29 @@ "assert-plus": "^1.0.0" } }, - "node_modules/gitter-sidecar": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/gitter-sidecar/-/gitter-sidecar-1.5.0.tgz", - "integrity": 
"sha512-9A0shJlhZF4b0SzU9bVmmNnPA8oQxhyQ8hjR75MovxZvApS1fYGTElhgYyKMC1HUa9BKTCdsJtoutce5u0saNA==", - "dependencies": { - "keymirror": "^0.1.1", - "marked": "^0.3.5", - "react": "^0.14.7", - "react-dom": "^0.14.7", - "react-redux": "^4.4.0", - "redux": "^3.3.0", - "semver": "^5.1.0", - "strip-indent": "^1.0.1", - "whatwg-fetch": "^0.11.0" - } - }, - "node_modules/gitter-sidecar/node_modules/semver": { - "version": "5.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", - "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "node_modules/giget": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/giget/-/giget-1.2.4.tgz", + "integrity": "sha512-Wv+daGyispVoA31TrWAVR+aAdP7roubTPEM/8JzRnqXhLbdJH0T9eQyXVFF8fjk3WKTsctII6QcyxILYgNp2DA==", + "dependencies": { + "citty": "^0.1.6", + "consola": "^3.4.0", + "defu": "^6.1.4", + "node-fetch-native": "^1.6.6", + "nypm": "^0.5.1", + "ohash": "^1.1.4", + "pathe": "^2.0.2", + "tar": "^6.2.1" + }, "bin": { - "semver": "bin/semver" + "giget": "dist/cli.mjs" } }, + "node_modules/giget/node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==" + }, "node_modules/glob": { "version": "8.1.0", "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", @@ -5252,6 +6642,7 @@ "version": "6.0.2", "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, "dependencies": { "is-glob": "^4.0.3" }, @@ -5288,9 +6679,10 @@ } }, "node_modules/globals": { - "version": "13.20.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.20.0.tgz", - "integrity": 
"sha512-Qg5QtVkCy/kv3FUSlu4ukeZDVf9ee0iXLAUYX13gbR17bnejFTzr4iS9bY7kwCf1NztRNm1t91fjOiyx4CSwPQ==", + "version": "13.24.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", + "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", + "dev": true, "dependencies": { "type-fest": "^0.20.2" }, @@ -5302,11 +6694,14 @@ } }, "node_modules/globalthis": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.3.tgz", - "integrity": "sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.4.tgz", + "integrity": "sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==", + "dev": true, + "license": "MIT", "dependencies": { - "define-properties": "^1.1.3" + "define-properties": "^1.2.1", + "gopd": "^1.0.1" }, "engines": { "node": ">= 0.4" @@ -5315,31 +6710,12 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/globby": { - "version": "11.1.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", - "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", - "dependencies": { - "array-union": "^2.1.0", - "dir-glob": "^3.0.1", - "fast-glob": "^3.2.9", - "ignore": "^5.2.0", - "merge2": "^1.4.1", - "slash": "^3.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/gopd": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", - "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", - "dependencies": { - "get-intrinsic": "^1.1.3" + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": 
"sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -5348,17 +6724,15 @@ "node_modules/graceful-fs": { "version": "4.2.11", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", - "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" - }, - "node_modules/graphemer": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", - "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==" + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true, + "peer": true }, "node_modules/gray-matter": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/gray-matter/-/gray-matter-4.0.3.tgz", "integrity": "sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q==", + "license": "MIT", "dependencies": { "js-yaml": "^3.13.1", "kind-of": "^6.0.2", @@ -5373,14 +6747,16 @@ "version": "1.0.10", "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "license": "MIT", "dependencies": { "sprintf-js": "~1.0.2" } }, "node_modules/gray-matter/node_modules/js-yaml": { - "version": "3.14.1", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", - "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "version": "3.14.2", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.2.tgz", + "integrity": "sha512-PMSmkqxr106Xa156c2M265Z+FTrPl+oxd/rgOQy2tijQeK5TxQ43psO1ZCwhVOSdnn+RzkzlRz/eY4BgJBYVpg==", + "license": "MIT", "dependencies": { "argparse": 
"^1.0.7", "esprima": "^4.0.0" @@ -5389,21 +6765,15 @@ "js-yaml": "bin/js-yaml.js" } }, - "node_modules/has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "dependencies": { - "function-bind": "^1.1.1" - }, - "engines": { - "node": ">= 0.4.0" - } - }, "node_modules/has-bigints": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", - "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.1.0.tgz", + "integrity": "sha512-R3pbpkcIqv2Pm3dUwgjclDRVmWpTJW2DcMzcIhEXEx1oh/CEMObMm3KLmRJOdvhM7o4uQBnwr8pzRK2sJWIqfg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -5412,25 +6782,31 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, "engines": { "node": ">=8" } }, "node_modules/has-property-descriptors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz", - "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", "dependencies": { - "get-intrinsic": "^1.1.1" + "es-define-property": "^1.0.0" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/has-proto": { - "version": "1.0.1", 
- "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz", - "integrity": "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.2.0.tgz", + "integrity": "sha512-KIL7eQPfHQRC8+XluaIw7BHUwwqL19bQn4hzNgdr+1wXoU0KKj6rufu47lhY7KbJR2C6T6+PfyN0Ea7wkSS+qQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.0" + }, "engines": { "node": ">= 0.4" }, @@ -5439,9 +6815,9 @@ } }, "node_modules/has-symbols": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", "engines": { "node": ">= 0.4" }, @@ -5450,11 +6826,11 @@ } }, "node_modules/has-tostringtag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", - "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", "dependencies": { - "has-symbols": "^1.0.2" + "has-symbols": "^1.0.3" }, "engines": { "node": ">= 0.4" @@ -5485,13 +6861,15 @@ "minimalistic-assert": "^1.0.1" } }, - "node_modules/he": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", - "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", - "dev": true, - "bin": { - "he": "bin/he" + 
"node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" } }, "node_modules/hmac-drbg": { @@ -5504,24 +6882,21 @@ "minimalistic-crypto-utils": "^1.0.1" } }, - "node_modules/hoist-non-react-statics": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz", - "integrity": "sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==", - "dependencies": { - "react-is": "^16.7.0" - } + "node_modules/hookable": { + "version": "5.5.3", + "resolved": "https://registry.npmjs.org/hookable/-/hookable-5.5.3.tgz", + "integrity": "sha512-Yc+BQe8SvoXH1643Qez1zqLRmbA5rCL+sSmk6TVos0LWVfNIB7PGncdlId77WzLGSIB5KaWgTaNTs2lNVEI6VQ==" }, "node_modules/html-encoding-sniffer": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-3.0.0.tgz", - "integrity": "sha512-oWv4T4yJ52iKrufjnyZPkrN0CH3QnrUqdB6In1g5Fe1mia8GmF36gnfNySxoZtxD5+NmYw1EElVXiBk93UeskA==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-4.0.0.tgz", + "integrity": "sha512-Y22oTqIU4uuPgEemfz7NDJz6OeKf12Lsu+QC+s3BVpda64lTiMYCyGwg5ki4vFxkMwQdeZDl2adZoqUgdFuTgQ==", "dev": true, "dependencies": { - "whatwg-encoding": "^2.0.0" + "whatwg-encoding": "^3.1.1" }, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/html-escaper": { @@ -5546,30 +6921,35 @@ } }, "node_modules/http-proxy-agent": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", - "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", + "version": "7.0.2", + 
"resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz", + "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==", "dev": true, "dependencies": { - "@tootallnate/once": "2", - "agent-base": "6", - "debug": "4" + "agent-base": "^7.1.0", + "debug": "^4.3.4" }, "engines": { - "node": ">= 6" + "node": ">= 14" } }, + "node_modules/https-browserify": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/https-browserify/-/https-browserify-1.0.0.tgz", + "integrity": "sha512-J+FkSdyD+0mA0N+81tMotaRMfSL9SGi+xpD3T6YApKsc3bGSXJlfXri3VyFOeYkfLRQisDk1W+jIFFKBeUBbBg==", + "dev": true + }, "node_modules/https-proxy-agent": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", - "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", + "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", "dev": true, "dependencies": { - "agent-base": "6", + "agent-base": "^7.1.2", "debug": "4" }, "engines": { - "node": ">= 6" + "node": ">= 14" } }, "node_modules/iconv-lite": { @@ -5604,22 +6984,32 @@ ] }, "node_modules/ignore": { - "version": "5.2.4", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.4.tgz", - "integrity": "sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==", + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "dev": true, + "license": "MIT", "engines": { "node": ">= 4" } }, + "node_modules/immediate": { + "version": "3.0.6", + "resolved": 
"https://registry.npmjs.org/immediate/-/immediate-3.0.6.tgz", + "integrity": "sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==", + "license": "MIT" + }, "node_modules/immutable": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/immutable/-/immutable-4.3.0.tgz", - "integrity": "sha512-0AOCmOip+xgJwEVTQj1EfiDDOkPmuyllDuTuEX+DDXUgapLAsBIfkg3sxCYyCEA8mQqZrrxPUGjcOQ2JS3WLkg==" + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/immutable/-/immutable-5.0.2.tgz", + "integrity": "sha512-1NU7hWZDkV7hJ4PJ9dur9gTNQ4ePNPN4k9/0YhwjzykTi/+3Q5pF93YU5QoVj8BuOnhLgaY8gs0U2pj4kSYVcw==" }, "node_modules/import-fresh": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", - "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", + "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", + "dev": true, + "license": "MIT", "dependencies": { "parent-module": "^1.0.0", "resolve-from": "^4.0.0" @@ -5635,6 +7025,7 @@ "version": "0.1.4", "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, "engines": { "node": ">=0.8.19" } @@ -5643,6 +7034,7 @@ "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "dev": true, "dependencies": { "once": "^1.3.0", "wrappy": "1" @@ -5660,13 +7052,15 @@ "dev": true }, "node_modules/internal-slot": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.5.tgz", - "integrity": 
"sha512-Y+R5hJrzs52QCG2laLn4udYVnxsfny9CpOhNhUvk/SSSVyF6T27FzRbF0sroPidSu3X8oEAkOn2K804mjpt6UQ==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz", + "integrity": "sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw==", + "dev": true, + "license": "MIT", "dependencies": { - "get-intrinsic": "^1.2.0", - "has": "^1.0.3", - "side-channel": "^1.0.4" + "es-errors": "^1.3.0", + "hasown": "^2.0.2", + "side-channel": "^1.1.0" }, "engines": { "node": ">= 0.4" @@ -5681,14 +7075,6 @@ "node": ">= 0.10" } }, - "node_modules/invariant": { - "version": "2.2.4", - "resolved": "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz", - "integrity": "sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==", - "dependencies": { - "loose-envify": "^1.0.0" - } - }, "node_modules/ipaddr.js": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", @@ -5713,47 +7099,68 @@ } }, "node_modules/is-array-buffer": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.2.tgz", - "integrity": "sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w==", + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.5.tgz", + "integrity": "sha512-DDfANUiiG2wC1qawP66qlTugJeL5HyzMpfr8lLK+jMQirGzNod0B12cFB/9q838Ru27sBwfw78/rdoU7RERz6A==", + "dev": true, + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.2.0", - "is-typed-array": "^1.1.10" + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "get-intrinsic": "^1.2.6" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-bigint": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", - "integrity": 
"sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", + "node_modules/is-async-function": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-async-function/-/is-async-function-2.1.1.tgz", + "integrity": "sha512-9dgM/cZBnNvjzaMYHVoxxfPj2QXt22Ev7SuuPrs+xav0ukGB0S6d4ydZdEiM48kLx5kDV+QBPrpVnFyefL8kkQ==", + "dev": true, + "license": "MIT", "dependencies": { - "has-bigints": "^1.0.1" + "async-function": "^1.0.0", + "call-bound": "^1.0.3", + "get-proto": "^1.0.1", + "has-tostringtag": "^1.0.2", + "safe-regex-test": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-binary-path": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", - "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "node_modules/is-bigint": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.1.0.tgz", + "integrity": "sha512-n4ZT37wG78iz03xPRKJrHTdZbe3IicyucEtdRsV5yglwc3GyUfbAfpSeD0FJ41NbUNSt5wbhqfp1fS+BgnvDFQ==", + "dev": true, + "license": "MIT", "dependencies": { - "binary-extensions": "^2.0.0" + "has-bigints": "^1.0.2" }, "engines": { - "node": ">=8" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-boolean-object": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", - "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.2.2.tgz", + "integrity": "sha512-wa56o2/ElJMYqjCjGkXri7it5FbebW5usLw/nPmCMs5DeZ7eziSYZhSmPRn0txqeW4LnAmQQU7FgqLpsEFKM4A==", + "dev": true, + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", - 
"has-tostringtag": "^1.0.0" + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -5785,21 +7192,6 @@ "node": ">=4" } }, - "node_modules/is-builtin-module": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/is-builtin-module/-/is-builtin-module-3.2.1.tgz", - "integrity": "sha512-BSLE3HnV2syZ0FK0iMA/yUGplUeMmNz4AW5fnTunbCIqZi4vG3WjJT9FHMy5D69xmAYBHXQhJdALdpwVxV501A==", - "dev": true, - "dependencies": { - "builtin-modules": "^3.3.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/is-callable": { "version": "1.2.7", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", @@ -5812,22 +7204,48 @@ } }, "node_modules/is-core-module": { - "version": "2.12.1", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.12.1.tgz", - "integrity": "sha512-Q4ZuBAe2FUsKtyQJoQHlvP8OvBERxO3jEmy1I7hcRXcJBGGHFh/aJBswbXuS9sgrDH2QUO8ilkwNPHvHMd8clg==", + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "dev": true, + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-data-view": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.2.tgz", + "integrity": "sha512-RKtWF8pGmS87i2D6gqQu/l7EYRlVdfzemCJN/P3UOs//x1QE7mfhvzHIApBTRf7axvT6DMGwSwBXYCT0nfB9xw==", + "dev": true, + "license": "MIT", "dependencies": { - "has": "^1.0.3" + "call-bound": "^1.0.2", + "get-intrinsic": "^1.2.6", + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-date-object": { - "version": 
"1.0.5", - "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", - "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.1.0.tgz", + "integrity": "sha512-PwwhEakHVKTdRNVOw+/Gyh0+MzlCl4R6qKvkhuvLtPMggI1WAHt9sOwZxQLSGpUaDnrdyDsomoRgNnCfKNSXXg==", + "dev": true, + "license": "MIT", "dependencies": { - "has-tostringtag": "^1.0.0" + "call-bound": "^1.0.2", + "has-tostringtag": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -5840,6 +7258,7 @@ "version": "0.1.1", "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", + "license": "MIT", "engines": { "node": ">=0.10.0" } @@ -5852,6 +7271,22 @@ "node": ">=0.10.0" } }, + "node_modules/is-finalizationregistry": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-finalizationregistry/-/is-finalizationregistry-1.1.1.tgz", + "integrity": "sha512-1pC6N8qWJbWoPtEjgcL2xyhQOP491EQjeUo3qTKcmV8YSDDJrOepfG8pcC7h/QgnQHYSv0mJ3Z/ZWxmatVrysg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -5887,9 +7322,14 @@ } }, "node_modules/is-map": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.2.tgz", - "integrity": "sha512-cOZFQQozTha1f4MxLFzlgKYPTyj26picdZTx82hbc/Xf4K/tZOOXSCkMvU4pKioRXGDLJRn0GM7Upe7kR721yg==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz", + "integrity": 
"sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -5916,9 +7356,11 @@ } }, "node_modules/is-negative-zero": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", - "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.3.tgz", + "integrity": "sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==", + "dev": true, + "license": "MIT", "engines": { "node": ">= 0.4" }, @@ -5935,11 +7377,14 @@ } }, "node_modules/is-number-object": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz", - "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.1.1.tgz", + "integrity": "sha512-lZhclumE1G6VYD8VHe35wFaIif+CTy5SJIi5+3y4psDgWu4wPDoBhF8NxUOinEc7pHgiTsT6MaBb92rKhhD+Xw==", + "dev": true, + "license": "MIT", "dependencies": { - "has-tostringtag": "^1.0.0" + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -5948,14 +7393,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-path-inside": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", - "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", - "engines": { - "node": ">=8" - } - }, "node_modules/is-potential-custom-element-name": { "version": "1.0.1", "resolved": 
"https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz", @@ -5963,12 +7400,16 @@ "dev": true }, "node_modules/is-regex": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", - "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz", + "integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==", + "dev": true, + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", - "has-tostringtag": "^1.0.0" + "call-bound": "^1.0.2", + "gopd": "^1.2.0", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" }, "engines": { "node": ">= 0.4" @@ -5978,30 +7419,43 @@ } }, "node_modules/is-set": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.2.tgz", - "integrity": "sha512-+2cnTEZeY5z/iXGbLhPrOAaK/Mau5k5eXq9j14CpRTftq0pAJu2MwVRSZhyZWBzx3o6X795Lz6Bpb6R0GKf37g==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.3.tgz", + "integrity": "sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-shared-array-buffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", - "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.4.tgz", + "integrity": "sha512-ISWac8drv4ZGfwKl5slpHG9OwPNty4jOWPRIhBpxOoD+hqITiwuipOQ2bNthAzwA3B4fIjO4Nln74N0S9byq8A==", + "dev": true, + "license": "MIT", 
"dependencies": { - "call-bind": "^1.0.2" + "call-bound": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-string": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", - "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.1.1.tgz", + "integrity": "sha512-BtEeSsoaQjlSPBemMQIrY1MY0uM6vnS1g5fmufYOtnxLGUZM2178PKbhsk7Ffv58IX+ZtcvoGwccYsh0PglkAA==", + "dev": true, + "license": "MIT", "dependencies": { - "has-tostringtag": "^1.0.0" + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -6011,11 +7465,15 @@ } }, "node_modules/is-symbol": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", - "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.1.1.tgz", + "integrity": "sha512-9gGx6GTtCQM73BgmHQXfDmLtfjjTUDSyoxTCbp5WtoixAhfgsDirWIcVQ/IHpvI5Vgd5i/J5F7B9cN/WlVbC/w==", + "dev": true, + "license": "MIT", "dependencies": { - "has-symbols": "^1.0.2" + "call-bound": "^1.0.2", + "has-symbols": "^1.1.0", + "safe-regex-test": "^1.1.0" }, "engines": { "node": ">= 0.4" @@ -6025,15 +7483,12 @@ } }, "node_modules/is-typed-array": { - "version": "1.1.10", - "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.10.tgz", - "integrity": "sha512-PJqgEHiWZvMpaFZ3uTc8kHPM4+4ADTlDniuQL7cU/UDA0Ql7F70yGfHph3cLNe+c9toaigv+DFzTJKhc2CtO6A==", + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz", + "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==", + "license": 
"MIT", "dependencies": { - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", - "for-each": "^0.3.3", - "gopd": "^1.0.1", - "has-tostringtag": "^1.0.0" + "which-typed-array": "^1.1.16" }, "engines": { "node": ">= 0.4" @@ -6043,36 +7498,63 @@ } }, "node_modules/is-weakmap": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.1.tgz", - "integrity": "sha512-NSBR4kH5oVj1Uwvv970ruUkCV7O1mzgVFO4/rev2cLRda9Tm9HrL70ZPut4rOHgY0FNrUu9BCbXA2sdQ+x0chA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.2.tgz", + "integrity": "sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-weakref": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", - "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.1.1.tgz", + "integrity": "sha512-6i9mGWSlqzNMEqpCp93KwRS1uUOodk2OJ6b+sq7ZPDSy2WuI5NFIxp/254TytR8ftefexkWn5xNiHUNpPOfSew==", + "dev": true, + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2" + "call-bound": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-weakset": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.2.tgz", - "integrity": "sha512-t2yVvttHkQktwnNNmBQ98AhENLdPUTDTE21uPqAQ0ARwQfGeQKRVS0NNurH7bTf7RrvcVn1OOge45CnBeHCSmg==", + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.4.tgz", + "integrity": "sha512-mfcwb6IzQyOKTs84CQMrOwW4gQcaTOAWJ0zzJCl2WSPDrWk/OzDaImWFH3djXhb24g4eudZfLRozAvPGw4d9hQ==", + "dev": true, + "license": 
"MIT", "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.1" + "call-bound": "^1.0.3", + "get-intrinsic": "^1.2.6" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/is-what": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/is-what/-/is-what-5.5.0.tgz", + "integrity": "sha512-oG7cgbmg5kLYae2N5IVd3jm2s+vldjxJzK1pcu9LfpGuQ93MQSzo0okvRna+7y5ifrD+20FE8FvjusyGaz14fw==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/mesqueeb" + } + }, "node_modules/isarray": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", @@ -6081,35 +7563,59 @@ "node_modules/isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true + }, + "node_modules/isomorphic-timers-promises": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/isomorphic-timers-promises/-/isomorphic-timers-promises-1.0.1.tgz", + "integrity": "sha512-u4sej9B1LPSxTGKB/HiuzvEQnXH0ECYkSVQU39koSwmFAxhlEAFl9RdTvLv4TOTQUgBS5O3O5fwUxk6byBZ+IQ==", + "dev": true, + "engines": { + "node": ">=10" + } }, "node_modules/istanbul-lib-coverage": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz", - "integrity": "sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw==", + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", + "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==", "dev": true, "engines": { "node": ">=8" } 
}, "node_modules/istanbul-lib-report": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz", - "integrity": "sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz", + "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==", "dev": true, "dependencies": { "istanbul-lib-coverage": "^3.0.0", - "make-dir": "^3.0.0", + "make-dir": "^4.0.0", "supports-color": "^7.1.0" }, "engines": { - "node": ">=8" + "node": ">=10" + } + }, + "node_modules/istanbul-lib-source-maps": { + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-5.0.6.tgz", + "integrity": "sha512-yg2d+Em4KizZC5niWhQaIomgf5WlL4vOOjZ5xGCmF8SnPE/mDWWXgvRExdcpCgh9lLRRa1/fSYp2ymmbJ1pI+A==", + "dev": true, + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.23", + "debug": "^4.1.1", + "istanbul-lib-coverage": "^3.0.0" + }, + "engines": { + "node": ">=10" } }, "node_modules/istanbul-reports": { - "version": "3.1.5", - "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.5.tgz", - "integrity": "sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w==", + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.7.tgz", + "integrity": "sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g==", "dev": true, "dependencies": { "html-escaper": "^2.0.0", @@ -6119,6 +7625,21 @@ "node": ">=8" } }, + "node_modules/jackspeak": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz", + "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==", + 
"dev": true, + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "optionalDependencies": { + "@pkgjs/parseargs": "^0.11.0" + } + }, "node_modules/jest-worker": { "version": "27.5.1", "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-27.5.1.tgz", @@ -6150,6 +7671,14 @@ "url": "https://github.com/chalk/supports-color?sponsor=1" } }, + "node_modules/jiti": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.4.2.tgz", + "integrity": "sha512-rg9zJN+G4n2nfJl5MW3BMygZX56zKPNVEYYqq7adpmMh4Jn2QNEwhvQlFy6jPVdcod7txZtKHWnyZiA3a0zP7A==", + "bin": { + "jiti": "lib/jiti-cli.mjs" + } + }, "node_modules/js-beautify": { "version": "1.14.9", "resolved": "https://registry.npmjs.org/js-beautify/-/js-beautify-1.14.9.tgz", @@ -6170,24 +7699,17 @@ "node": ">=12" } }, - "node_modules/js-string-escape": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/js-string-escape/-/js-string-escape-1.0.1.tgz", - "integrity": "sha512-Smw4xcfIQ5LVjAOuJCvN/zIodzA/BBSsluuoSykP+lUvScIi4U6RJLfwHet5cxFnCswUjISV8oAXaqaJDY3chg==", - "dev": true, - "engines": { - "node": ">= 0.8" - } - }, "node_modules/js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" }, "node_modules/js-yaml": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", - "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz", + "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==", + "dev": true, + "license": "MIT", "dependencies": { "argparse": "^2.0.1" }, @@ -6201,43 +7723,38 @@ "integrity": 
"sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg==" }, "node_modules/jsdom": { - "version": "21.1.2", - "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-21.1.2.tgz", - "integrity": "sha512-sCpFmK2jv+1sjff4u7fzft+pUh2KSUbUrEHYHyfSIbGTIcmnjyp83qg6qLwdJ/I3LpTXx33ACxeRL7Lsyc6lGQ==", - "dev": true, - "dependencies": { - "abab": "^2.0.6", - "acorn": "^8.8.2", - "acorn-globals": "^7.0.0", - "cssstyle": "^3.0.0", - "data-urls": "^4.0.0", - "decimal.js": "^10.4.3", - "domexception": "^4.0.0", - "escodegen": "^2.0.0", - "form-data": "^4.0.0", - "html-encoding-sniffer": "^3.0.0", - "http-proxy-agent": "^5.0.0", - "https-proxy-agent": "^5.0.1", + "version": "26.1.0", + "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-26.1.0.tgz", + "integrity": "sha512-Cvc9WUhxSMEo4McES3P7oK3QaXldCfNWp7pl2NNeiIFlCoLr3kfq9kb1fxftiwk1FLV7CvpvDfonxtzUDeSOPg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cssstyle": "^4.2.1", + "data-urls": "^5.0.0", + "decimal.js": "^10.5.0", + "html-encoding-sniffer": "^4.0.0", + "http-proxy-agent": "^7.0.2", + "https-proxy-agent": "^7.0.6", "is-potential-custom-element-name": "^1.0.1", - "nwsapi": "^2.2.4", - "parse5": "^7.1.2", - "rrweb-cssom": "^0.6.0", + "nwsapi": "^2.2.16", + "parse5": "^7.2.1", + "rrweb-cssom": "^0.8.0", "saxes": "^6.0.0", "symbol-tree": "^3.2.4", - "tough-cookie": "^4.1.2", - "w3c-xmlserializer": "^4.0.0", + "tough-cookie": "^5.1.1", + "w3c-xmlserializer": "^5.0.0", "webidl-conversions": "^7.0.0", - "whatwg-encoding": "^2.0.0", - "whatwg-mimetype": "^3.0.0", - "whatwg-url": "^12.0.1", - "ws": "^8.13.0", - "xml-name-validator": "^4.0.0" + "whatwg-encoding": "^3.1.1", + "whatwg-mimetype": "^4.0.0", + "whatwg-url": "^14.1.1", + "ws": "^8.18.0", + "xml-name-validator": "^5.0.0" }, "engines": { - "node": ">=14" + "node": ">=18" }, "peerDependencies": { - "canvas": "^2.5.0" + "canvas": "^3.0.0" }, "peerDependenciesMeta": { "canvas": { @@ -6245,18 +7762,33 @@ } } }, - 
"node_modules/jsesc": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", - "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", + "node_modules/jsdom/node_modules/xml-name-validator": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-5.0.0.tgz", + "integrity": "sha512-EvGK8EJ3DhaHfbRlETOWAS5pO9MZITeauHKJyb8wyajUfQUenkIg2MvLDTZ4T/TgIcm3HU0TFBgWWboAZ30UHg==", "dev": true, + "engines": { + "node": ">=18" + } + }, + "node_modules/jsesc": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.0.2.tgz", + "integrity": "sha512-xKqzzWXDttJuOcawBt4KnKHHIf5oQ/Cxax+0PWFG+DFDgHNAdi+TXECADI+RYiFUMmx8792xsMbbgXj4CwnP4g==", "bin": { "jsesc": "bin/jsesc" }, "engines": { - "node": ">=4" + "node": ">=6" } }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true, + "license": "MIT" + }, "node_modules/json-parse-even-better-errors": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", @@ -6267,17 +7799,21 @@ "node_modules/json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true }, "node_modules/json-stable-stringify-without-jsonify": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", - "integrity": 
"sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==" + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true }, "node_modules/json5": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz", "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==", + "dev": true, + "license": "MIT", "dependencies": { "minimist": "^1.2.0" }, @@ -6285,91 +7821,43 @@ "json5": "lib/cli.js" } }, - "node_modules/jsonc-parser": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-3.2.0.tgz", - "integrity": "sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w==", - "dev": true - }, - "node_modules/jstransform": { - "version": "11.0.3", - "resolved": "https://registry.npmjs.org/jstransform/-/jstransform-11.0.3.tgz", - "integrity": "sha512-LGm87w0A8E92RrcXt94PnNHkFqHmgDy3mKHvNZOG7QepKCTCH/VB6S+IEN+bT4uLN3gVpOT0vvOOVd96osG71g==", - "dependencies": { - "base62": "^1.1.0", - "commoner": "^0.10.1", - "esprima-fb": "^15001.1.0-dev-harmony-fb", - "object-assign": "^2.0.0", - "source-map": "^0.4.2" - }, - "bin": { - "jstransform": "bin/jstransform" - }, - "engines": { - "node": ">=0.8.8" - } - }, - "node_modules/jstransform/node_modules/object-assign": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-2.1.1.tgz", - "integrity": "sha512-CdsOUYIh5wIiozhJ3rLQgmUTgcyzFwZZrqhkKhODMoGtPKM+wt0h0CNIoauJWMsS9822EdzPsF/6mb4nLvPN5g==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/jstransform/node_modules/source-map": { - "version": "0.4.4", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.4.4.tgz", - "integrity": "sha512-Y8nIfcb1s/7DcobUz1yOO1GSp7gyL+D9zLHDehT7iRESqGSxjJ448Sg7rvfgsRJCnKLdSl11uGf0s9X80cH0/A==", - "dependencies": { - "amdefine": 
">=0.0.4" - }, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/jsx-ast-utils": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.3.3.tgz", - "integrity": "sha512-fYQHZTZ8jSfmWZ0iyzfwiU4WDX4HpHbMCZ3gPlWYiCl3BoeOTsqKBqnTVfH2rYT7eP5c3sVbeSPHnnJOaTrWiw==", + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "license": "MIT", "dependencies": { - "array-includes": "^3.1.5", - "object.assign": "^4.1.3" - }, - "engines": { - "node": ">=4.0" + "json-buffer": "3.0.1" } }, - "node_modules/keymirror": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/keymirror/-/keymirror-0.1.1.tgz", - "integrity": "sha512-vIkZAFWoDijgQT/Nvl2AHCMmnegN2ehgTPYuyy2hWQkQSntI0S7ESYqdLkoSe1HyEBFHHkCgSIvVdSEiWwKvCg==" - }, "node_modules/kind-of": { "version": "6.0.3", "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "license": "MIT", "engines": { "node": ">=0.10.0" } }, - "node_modules/language-subtag-registry": { - "version": "0.3.22", - "resolved": "https://registry.npmjs.org/language-subtag-registry/-/language-subtag-registry-0.3.22.tgz", - "integrity": "sha512-tN0MCzyWnoz/4nHS6uxdlFWoUZT7ABptwKPQ52Ea7URk6vll88bWBVhodtnlfEuCcKWNGoc+uGbw1cwa9IKh/w==" - }, - "node_modules/language-tags": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/language-tags/-/language-tags-1.0.5.tgz", - "integrity": "sha512-qJhlO9cGXi6hBGKoxEG/sKZDAHD5Hnu9Hs4WbOY3pCWXDhw0N8x1NenNzm2EnNLkLkk7J2SdxAkDSbb6ftT+UQ==", - "dependencies": { - "language-subtag-registry": "~0.3.2" + "node_modules/klona": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/klona/-/klona-2.0.6.tgz", + "integrity": 
"sha512-dhG34DXATL5hSxJbIexCft8FChFXtmskoZYnoPWjXQuebWYCNkVeV3KkGegCK9CP1oswI/vQibS2GY7Em/sJJA==", + "engines": { + "node": ">= 8" } }, + "node_modules/knitwork": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/knitwork/-/knitwork-1.2.0.tgz", + "integrity": "sha512-xYSH7AvuQ6nXkq42x0v5S8/Iry+cfulBz/DJQzhIyESdLD7425jXsPy4vn5cCXU+HhRN2kVw51Vd1K6/By4BQg==" + }, "node_modules/levn": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, "dependencies": { "prelude-ls": "^1.2.1", "type-check": "~0.4.0" @@ -6378,12 +7866,22 @@ "node": ">= 0.8.0" } }, + "node_modules/lie": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/lie/-/lie-3.1.1.tgz", + "integrity": "sha512-RiNhHysUjhrDQntfYSfY4MU24coXXdEOgw9WGcKHNeEwffDYbF//u87M1EWaMGzuFoSbqW0C9C6lEEhDOAswfw==", + "license": "MIT", + "dependencies": { + "immediate": "~3.0.5" + } + }, "node_modules/linkify-it": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-4.0.1.tgz", - "integrity": "sha512-C7bfi1UZmoj8+PQx22XyeXCuBlokoyWQL5pWSP+EI6nzRylyThouddufc2c1NDIcP9k5agmN9fLpA7VNJfIiqw==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-5.0.0.tgz", + "integrity": "sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==", + "license": "MIT", "dependencies": { - "uc.micro": "^1.0.1" + "uc.micro": "^2.0.0" } }, "node_modules/loader-runner": { @@ -6423,10 +7921,13 @@ } }, "node_modules/local-pkg": { - "version": "0.4.3", - "resolved": "https://registry.npmjs.org/local-pkg/-/local-pkg-0.4.3.tgz", - "integrity": "sha512-SFppqq5p42fe2qcZQqqEOiVRXl+WCP1MdT6k7BDEW1j++sp5fIY+/fdRQitvKgB5BrBcmrs5m/L0v2FrU5MY1g==", - "dev": true, + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/local-pkg/-/local-pkg-1.0.0.tgz", + "integrity": 
"sha512-bbgPw/wmroJsil/GgL4qjDzs5YLTBMQ99weRsok1XCDccQeehbHA/I1oRvk2NPtr7KGZgT/Y5tPRnAtMqeG2Kg==", + "dependencies": { + "mlly": "^1.7.3", + "pkg-types": "^1.3.0" + }, "engines": { "node": ">=14" }, @@ -6434,10 +7935,20 @@ "url": "https://github.com/sponsors/antfu" } }, + "node_modules/localforage": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/localforage/-/localforage-1.10.0.tgz", + "integrity": "sha512-14/H1aX7hzBBmmh7sGPd+AOMkkIrHM3Z1PAyGgZigA1H1p5O5ANnMyWzvpAETtG68/dC4pC0ncy3+PPGzXZHPg==", + "license": "Apache-2.0", + "dependencies": { + "lie": "3.1.1" + } + }, "node_modules/locate-path": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, "dependencies": { "p-locate": "^5.0.0" }, @@ -6449,111 +7960,108 @@ } }, "node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" - }, - "node_modules/lodash-es": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz", - "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==" + "version": "4.17.23", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz", + "integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==", + "dev": true, + "license": "MIT" }, "node_modules/lodash.merge": { "version": "4.6.2", "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", - "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==" - }, - "node_modules/loose-envify": { - "version": "1.4.0", - "resolved": 
"https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", - "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", - "dependencies": { - "js-tokens": "^3.0.0 || ^4.0.0" - }, - "bin": { - "loose-envify": "cli.js" - } + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true }, "node_modules/loupe": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/loupe/-/loupe-2.3.6.tgz", - "integrity": "sha512-RaPMZKiMy8/JruncMU5Bt6na1eftNoo++R4Y+N2FrxkDVTrGvcyzFTsaGif4QTeKESheMGegbhw6iUAq+5A8zA==", + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.1.3.tgz", + "integrity": "sha512-kkIp7XSkP78ZxJEsSxW3712C6teJVoeHHwgo9zJ380de7IYyJ2ISlxojcH2pC5OFLewESmnRi/+XCDIEEVyoug==", "dev": true, + "license": "MIT" + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", "dependencies": { - "get-func-name": "^2.0.0" + "yallist": "^3.0.2" } }, + "node_modules/lru-cache/node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==" + }, "node_modules/magic-string": { - "version": "0.30.5", - "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.5.tgz", - "integrity": "sha512-7xlpfBaQaP/T6Vh8MO/EqXSW5En6INHEvEXQiuff7Gku0PWjU3uf6w/j9o7O+SpB5fOAkrI5HeoNgwjEO0pFsA==", + "version": "0.30.17", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.17.tgz", + "integrity": "sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==", "dependencies": { - "@jridgewell/sourcemap-codec": "^1.4.15" - }, 
- "engines": { - "node": ">=12" + "@jridgewell/sourcemap-codec": "^1.5.0" + } + }, + "node_modules/magicast": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/magicast/-/magicast-0.3.5.tgz", + "integrity": "sha512-L0WhttDl+2BOsybvEOLK7fW3UA0OQ0IQ2d6Zl2x/a6vVRs3bAY0ECOSHHeL5jD+SbOpOCUEi0y1DgHEn9Qn1AQ==", + "devOptional": true, + "dependencies": { + "@babel/parser": "^7.25.4", + "@babel/types": "^7.25.4", + "source-map-js": "^1.2.0" } }, "node_modules/make-dir": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", - "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", + "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==", "dev": true, "dependencies": { - "semver": "^6.0.0" + "semver": "^7.5.3" }, "engines": { - "node": ">=8" + "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/make-dir/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true, - "bin": { - "semver": "bin/semver.js" - } - }, "node_modules/markdown-it": { - "version": "13.0.2", - "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-13.0.2.tgz", - "integrity": "sha512-FtwnEuuK+2yVU7goGn/MJ0WBZMM9ZPgU9spqlFs7/A/pDIUNSOQZhUgOqYCficIuR2QaFnrt8LHqBWsbTAoI5w==", + "version": "14.1.0", + "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-14.1.0.tgz", + "integrity": "sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==", + "license": "MIT", "dependencies": { "argparse": "^2.0.1", - "entities": "~3.0.1", - "linkify-it": "^4.0.1", - 
"mdurl": "^1.0.1", - "uc.micro": "^1.0.5" + "entities": "^4.4.0", + "linkify-it": "^5.0.0", + "mdurl": "^2.0.0", + "punycode.js": "^2.3.1", + "uc.micro": "^2.1.0" }, "bin": { - "markdown-it": "bin/markdown-it.js" + "markdown-it": "bin/markdown-it.mjs" } }, - "node_modules/marked": { - "version": "0.3.19", - "resolved": "https://registry.npmjs.org/marked/-/marked-0.3.19.tgz", - "integrity": "sha512-ea2eGWOqNxPcXv8dyERdSr/6FmzvWwzjMxpfGB/sbMccXoct+xY+YukPD+QTUZwyvK7BZwcr4m21WBOW41pAkg==", - "bin": { - "marked": "bin/marked" + "node_modules/markdown-it-async": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/markdown-it-async/-/markdown-it-async-2.2.0.tgz", + "integrity": "sha512-sITME+kf799vMeO/ww/CjH6q+c05f6TLpn6VOmmWCGNqPJzSh+uFgZoMB9s0plNtW6afy63qglNAC3MhrhP/gg==", + "license": "MIT", + "dependencies": { + "@types/markdown-it": "^14.1.2", + "markdown-it": "^14.1.0" }, - "engines": { - "node": ">=0.10.0" + "funding": { + "url": "https://github.com/sponsors/antfu" } }, - "node_modules/md5-hex": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/md5-hex/-/md5-hex-3.0.1.tgz", - "integrity": "sha512-BUiRtTtV39LIJwinWBjqVsU9xhdnz7/i889V859IBFpuqGAj6LuOvHv5XLbgZ2R7ptJoJaEcxkv88/h25T7Ciw==", - "dev": true, - "dependencies": { - "blueimp-md5": "^2.10.0" - }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", "engines": { - "node": ">=8" + "node": ">= 0.4" } }, "node_modules/md5.js": { @@ -6567,9 +8075,10 @@ } }, "node_modules/mdurl": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", - "integrity": "sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz", + 
"integrity": "sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==", + "license": "MIT" }, "node_modules/media-typer": { "version": "0.3.0", @@ -6580,9 +8089,12 @@ } }, "node_modules/merge-descriptors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", - "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==" + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", + "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==", + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } }, "node_modules/merge-stream": { "version": "2.0.0", @@ -6608,11 +8120,11 @@ } }, "node_modules/micromatch": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", - "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", "dependencies": { - "braces": "^3.0.2", + "braces": "^3.0.3", "picomatch": "^2.3.1" }, "engines": { @@ -6680,6 +8192,7 @@ "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, "dependencies": { "brace-expansion": "^1.1.7" }, @@ -6691,33 +8204,63 @@ "version": "1.2.8", "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "dev": true, "funding": { "url": 
"https://github.com/sponsors/ljharb" } }, - "node_modules/mkdirp": { - "version": "0.5.6", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", - "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", + "node_modules/minipass": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", + "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/minizlib": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", + "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", "dependencies": { - "minimist": "^1.2.6" + "minipass": "^3.0.0", + "yallist": "^4.0.0" }, - "bin": { - "mkdirp": "bin/cmd.js" + "engines": { + "node": ">= 8" } }, + "node_modules/minizlib/node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/mitt": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mitt/-/mitt-3.0.1.tgz", + "integrity": "sha512-vKivATfr97l2/QBCYAkXYDbrIWPM2IIKEl7YPhjCvKlG3kE2gm+uBo6nEXK3M5/Ffh/FLpKExzOQ3JJoJGFKBw==" + }, "node_modules/mlly": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/mlly/-/mlly-1.5.0.tgz", - "integrity": "sha512-NPVQvAY1xr1QoVeG0cy8yUYC7FQcOx6evl/RjT1wL5FvzPnzOysoqB/jmx/DhssT2dYa8nxECLAaFI/+gVLhDQ==", - "dev": true, + "version": "1.7.4", + "resolved": "https://registry.npmjs.org/mlly/-/mlly-1.7.4.tgz", + "integrity": "sha512-qmdSIPC4bDJXgZTCR7XosJiNKySV7O215tsPtDN9iEO/7q/76b/ijtgRu/+epFXSJhijtTCCGp3DWS549P3xKw==", "dependencies": { - 
"acorn": "^8.11.3", - "pathe": "^1.1.2", - "pkg-types": "^1.0.3", - "ufo": "^1.3.2" + "acorn": "^8.14.0", + "pathe": "^2.0.1", + "pkg-types": "^1.3.0", + "ufo": "^1.5.4" } }, + "node_modules/mlly/node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==" + }, "node_modules/moment": { "version": "2.30.1", "resolved": "https://registry.npmjs.org/moment/-/moment-2.30.1.tgz", @@ -6736,29 +8279,30 @@ } }, "node_modules/mrmime": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/mrmime/-/mrmime-1.0.1.tgz", - "integrity": "sha512-hzzEagAgDyoU1Q6yg5uI+AorQgdvMCur3FcKf7NhMKWsaYg+RnbTyHRa/9IlLF9rf455MOCtcqqrQQ83pPP7Uw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mrmime/-/mrmime-2.0.0.tgz", + "integrity": "sha512-eu38+hdgojoyq63s+yTpN4XMBdt5l8HhMhc4VKLO9KM5caLIBvUm4thi7fFaxyTmCKeNnXZ5pAlBwCUnhA09uw==", "dev": true, "engines": { "node": ">=10" } }, "node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" }, "node_modules/muggle-string": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/muggle-string/-/muggle-string-0.3.1.tgz", - "integrity": "sha512-ckmWDJjphvd/FvZawgygcUeQCxzvohjFO5RxTjj4eq8kw359gFF3E1brjfI+viLMxss5JrHTDRHZvu2/tuy0Qg==", - "dev": true + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/muggle-string/-/muggle-string-0.4.1.tgz", + "integrity": "sha512-VNTrAak/KhO2i8dqqnqnAHOa3cYBwXEZe9h+D5h/1ZqFSTEFHdM65lR7RoIqq3tBBYavsOXV84NoHXZ0AkPyqQ==", + "dev": true, + "license": "MIT" }, 
"node_modules/nanoid": { - "version": "3.3.7", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz", - "integrity": "sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==", + "version": "3.3.8", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.8.tgz", + "integrity": "sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==", "funding": [ { "type": "github", @@ -6775,12 +8319,8 @@ "node_modules/natural-compare": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", - "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==" - }, - "node_modules/natural-compare-lite": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/natural-compare-lite/-/natural-compare-lite-1.4.0.tgz", - "integrity": "sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g==" + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true }, "node_modules/negotiator": { "version": "0.6.3", @@ -6797,12 +8337,21 @@ "dev": true, "peer": true }, + "node_modules/node-addon-api": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-7.1.1.tgz", + "integrity": "sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ==", + "optional": true + }, + "node_modules/node-fetch-native": { + "version": "1.6.6", + "resolved": "https://registry.npmjs.org/node-fetch-native/-/node-fetch-native-1.6.6.tgz", + "integrity": "sha512-8Mc2HhqPdlIfedsuZoc3yioPuzp6b+L5jRCRY1QzuWZh2EGJVQrGppC6V6cF0bLdbW0+O2YpqCA25aF/1lvipQ==" + }, "node_modules/node-releases": { - "version": "2.0.11", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.11.tgz", - "integrity": 
"sha512-+M0PwXeU80kRohZ3aT4J/OnR+l9/KD2nVLNNoRgFtnf+umQVFdGBAO2N8+nCnEi0xlh/Wk3zOGC+vNNx+uM79Q==", - "dev": true, - "peer": true + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.18.tgz", + "integrity": "sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==" }, "node_modules/node-rsa": { "version": "1.1.1", @@ -6812,6 +8361,74 @@ "asn1": "^0.2.4" } }, + "node_modules/node-stdlib-browser": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/node-stdlib-browser/-/node-stdlib-browser-1.2.0.tgz", + "integrity": "sha512-VSjFxUhRhkyed8AtLwSCkMrJRfQ3e2lGtG3sP6FEgaLKBBbxM/dLfjRe1+iLhjvyLFW3tBQ8+c0pcOtXGbAZJg==", + "dev": true, + "dependencies": { + "assert": "^2.0.0", + "browser-resolve": "^2.0.0", + "browserify-zlib": "^0.2.0", + "buffer": "^5.7.1", + "console-browserify": "^1.1.0", + "constants-browserify": "^1.0.0", + "create-require": "^1.1.1", + "crypto-browserify": "^3.11.0", + "domain-browser": "^4.22.0", + "events": "^3.0.0", + "https-browserify": "^1.0.0", + "isomorphic-timers-promises": "^1.0.1", + "os-browserify": "^0.3.0", + "path-browserify": "^1.0.1", + "pkg-dir": "^5.0.0", + "process": "^0.11.10", + "punycode": "^1.4.1", + "querystring-es3": "^0.2.1", + "readable-stream": "^3.6.0", + "stream-browserify": "^3.0.0", + "stream-http": "^3.2.0", + "string_decoder": "^1.0.0", + "timers-browserify": "^2.0.4", + "tty-browserify": "0.0.1", + "url": "^0.11.0", + "util": "^0.12.4", + "vm-browserify": "^1.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/node-stdlib-browser/node_modules/buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": 
"https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, + "node_modules/node-stdlib-browser/node_modules/punycode": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", + "integrity": "sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==", + "dev": true + }, "node_modules/nopt": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/nopt/-/nopt-6.0.0.tgz", @@ -6827,18 +8444,11 @@ "node": "^12.13.0 || ^14.15.0 || >=16.0.0" } }, - "node_modules/normalize-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", - "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/nth-check": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==", + "dev": true, "dependencies": { "boolbase": "^1.0.0" }, @@ -6867,23 +8477,43 @@ } }, "node_modules/nwsapi": { - "version": "2.2.4", - "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.4.tgz", - "integrity": "sha512-NHj4rzRo0tQdijE9ZqAx6kYDcoRwYwSYzCA8MY3JzfxlrvEU0jhnhJT9BhqhJs7I/dKcrDm6TyulaRqZPIhN5g==", + "version": "2.2.20", + "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.20.tgz", + "integrity": "sha512-/ieB+mDe4MrrKMT8z+mQL8klXydZWGR5Dowt4RAGKbJ3kIGEx3X4ljUo+6V73IXtUPWgfOlU5B9MlGxFO5T+cA==", "dev": true }, - "node_modules/object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + 
"node_modules/nypm": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/nypm/-/nypm-0.5.2.tgz", + "integrity": "sha512-AHzvnyUJYSrrphPhRWWZNcoZfArGNp3Vrc4pm/ZurO74tYNTgAPrEyBQEKy+qioqmWlPXwvMZCG2wOaHlPG0Pw==", + "dependencies": { + "citty": "^0.1.6", + "consola": "^3.4.0", + "pathe": "^2.0.2", + "pkg-types": "^1.3.1", + "tinyexec": "^0.3.2", + "ufo": "^1.5.4" + }, + "bin": { + "nypm": "dist/cli.mjs" + }, "engines": { - "node": ">=0.10.0" + "node": "^14.16.0 || >=16.10.0" } }, + "node_modules/nypm/node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==" + }, "node_modules/object-inspect": { - "version": "1.12.3", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz", - "integrity": "sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==", + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -6912,13 +8542,16 @@ } }, "node_modules/object.assign": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz", - "integrity": "sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "has-symbols": "^1.0.3", + "version": "4.1.7", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.7.tgz", + "integrity": "sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==", + "license": "MIT", + "dependencies": { + 
"call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0", + "has-symbols": "^1.1.0", "object-keys": "^1.1.1" }, "engines": { @@ -6928,27 +8561,17 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/object.entries": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.6.tgz", - "integrity": "sha512-leTPzo4Zvg3pmbQ3rDK69Rl8GQvIqMWubrkxONG9/ojtFE2rD9fjMKfSI5BxW3osRH1m6VdzmqK8oAY9aT4x5w==", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" - }, - "engines": { - "node": ">= 0.4" - } - }, "node_modules/object.fromentries": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.6.tgz", - "integrity": "sha512-VciD13dswC4j1Xt5394WR4MzmAQmlgN72phd/riNp9vtD7tp4QQWJ0R4wvclXcafgcYK8veHRed2W6XeGBvcfg==", + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.8.tgz", + "integrity": "sha512-k6E21FzySsSK5a21KRADBd/NGneRegFO5pLHfdQLpRDETUNJueLXs3WCzyQ3tFRDYgbq3KHGXfTbi2bs8WQ6rQ==", + "dev": true, + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-object-atoms": "^1.0.0" }, "engines": { "node": ">= 0.4" @@ -6957,26 +8580,32 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/object.hasown": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/object.hasown/-/object.hasown-1.1.2.tgz", - "integrity": "sha512-B5UIT3J1W+WuWIU55h0mjlwaqxiE5vYENJXIXZ4VFe05pNYrkKuK0U/6aFcb0pKywYJh7IhfoqUfKVmrJJHZHw==", + "node_modules/object.groupby": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.3.tgz", + "integrity": 
"sha512-+Lhy3TQTuzXI5hevh8sBGqbmurHbbIjAi0Z4S63nthVLmLxfbj4T54a4CfZrXIrt9iP4mVAPYMo/v99taj3wjQ==", + "dev": true, + "license": "MIT", "dependencies": { - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2" }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "engines": { + "node": ">= 0.4" } }, "node_modules/object.values": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.6.tgz", - "integrity": "sha512-FVVTkD1vENCsAcwNs9k6jea2uHC/X0+JcjG8YA60FN5CMaJmG95wT9jek/xX9nornqGRrBkKtzuAu2wuHpKqvw==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.2.1.tgz", + "integrity": "sha512-gXah6aZrcUxjWg2zR2MwouP2eHlCBzdV4pygudehaKXSGW4v2AsRQUK+lwwXhii6KFZcunEnmSUoYp5CXibxtA==", + "dev": true, + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" }, "engines": { "node": ">= 0.4" @@ -6985,6 +8614,11 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/ohash": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/ohash/-/ohash-1.1.4.tgz", + "integrity": "sha512-FlDryZAahJmEF3VR3w1KogSEdWX3WhA5GPakFx4J81kEAiHyLMpdLLElS8n8dfNadMgAne/MywcvmogzscVt4g==" + }, "node_modules/on-finished": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", @@ -7000,30 +8634,57 @@ "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, "dependencies": { "wrappy": "1" } }, "node_modules/optionator": { - "version": "0.9.1", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", - 
"integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", + "dev": true, "dependencies": { "deep-is": "^0.1.3", "fast-levenshtein": "^2.0.6", "levn": "^0.4.1", "prelude-ls": "^1.2.1", "type-check": "^0.4.0", - "word-wrap": "^1.2.3" + "word-wrap": "^1.2.5" }, "engines": { "node": ">= 0.8.0" } }, + "node_modules/os-browserify": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/os-browserify/-/os-browserify-0.3.0.tgz", + "integrity": "sha512-gjcpUc3clBf9+210TRaDWbf+rZZZEshZ+DlXMRCeAjp0xhTrnQsKHypIy1J3d5hKdUzj69t708EHtU8P6bUn0A==", + "dev": true + }, + "node_modules/own-keys": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/own-keys/-/own-keys-1.0.1.tgz", + "integrity": "sha512-qFOyK5PjiWZd+QQIh+1jhdb9LpxTF0qs7Pm8o5QHYZ0M3vKqSqzsZaEB6oWlxZ+q2sJBMI/Ktgd2N5ZwQoRHfg==", + "dev": true, + "license": "MIT", + "dependencies": { + "get-intrinsic": "^1.2.6", + "object-keys": "^1.1.1", + "safe-push-apply": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/p-limit": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, "dependencies": { "yocto-queue": "^0.1.0" }, @@ -7038,6 +8699,7 @@ "version": "5.0.0", "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, "dependencies": { "p-limit": "^3.0.2" }, @@ -7048,10 +8710,24 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + 
"node_modules/package-json-from-dist": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", + "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", + "dev": true + }, + "node_modules/pako": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", + "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==", + "dev": true + }, "node_modules/parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "license": "MIT", "dependencies": { "callsites": "^3.0.0" }, @@ -7072,29 +8748,17 @@ } }, "node_modules/parse5": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.1.2.tgz", - "integrity": "sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==", + "version": "7.2.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.2.1.tgz", + "integrity": "sha512-BuBYQYlv1ckiPdQi/ohiivi9Sagc9JG+Ozs0r7b/0iK3sKmrb0b9FdWdBbOdx6hBCM/F9Ir82ofnBhtZOjCRPQ==", "dev": true, "dependencies": { - "entities": "^4.4.0" + "entities": "^4.5.0" }, "funding": { "url": "https://github.com/inikulin/parse5?sponsor=1" } }, - "node_modules/parse5/node_modules/entities": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", - "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", - "dev": true, - "engines": { - "node": ">=0.12" - }, - "funding": { - "url": "https://github.com/fb55/entities?sponsor=1" - } - }, "node_modules/parseurl": { "version": "1.3.3", "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", 
@@ -7113,6 +8777,7 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, "engines": { "node": ">=8" } @@ -7121,6 +8786,7 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, "engines": { "node": ">=0.10.0" } @@ -7129,6 +8795,7 @@ "version": "3.1.1", "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, "engines": { "node": ">=8" } @@ -7136,54 +8803,108 @@ "node_modules/path-parse": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", - "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" - }, - "node_modules/path-to-regexp": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", - "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true }, - "node_modules/path-type": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", - "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "node_modules/path-scurry": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", + "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", + "dev": true, 
+ "dependencies": { + "lru-cache": "^10.2.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + }, "engines": { - "node": ">=8" + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/path-scurry/node_modules/lru-cache": { + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", + "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", + "dev": true + }, + "node_modules/path-to-regexp": { + "version": "0.1.12", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz", + "integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==" + }, "node_modules/pathe": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/pathe/-/pathe-1.1.2.tgz", "integrity": "sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==" }, "node_modules/pathval": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz", - "integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-2.0.0.tgz", + "integrity": "sha512-vE7JKRyES09KiunauX7nd2Q9/L7lhok4smP9RZTDeD4MVs72Dp2qNFVz39Nz5a0FVEW0BJR6C0DYrq6unoziZA==", "dev": true, + "license": "MIT", "engines": { - "node": "*" + "node": ">= 14.16" } }, "node_modules/pbkdf2": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.2.tgz", - "integrity": "sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA==", + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.3.tgz", + "integrity": "sha512-wfRLBZ0feWRhCIkoMB6ete7czJcnNnqRpcoWQBLqatqXXmelSRqfdDK4F3u9T2s2cXas/hQJcryI/4lAL+XTlA==", + "license": "MIT", 
"dependencies": { - "create-hash": "^1.1.2", - "create-hmac": "^1.1.4", - "ripemd160": "^2.0.1", - "safe-buffer": "^5.0.1", - "sha.js": "^2.4.8" + "create-hash": "~1.1.3", + "create-hmac": "^1.1.7", + "ripemd160": "=2.0.1", + "safe-buffer": "^5.2.1", + "sha.js": "^2.4.11", + "to-buffer": "^1.2.0" }, "engines": { "node": ">=0.12" } }, - "node_modules/picocolors": { + "node_modules/pbkdf2/node_modules/create-hash": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.1.3.tgz", + "integrity": "sha512-snRpch/kwQhcdlnZKYanNF1m0RDlrCdSKQaH87w1FCFPVPNCQ/Il9QJKAX2jVBZddRdaHBMC+zXa9Gw9tmkNUA==", + "license": "MIT", + "dependencies": { + "cipher-base": "^1.0.1", + "inherits": "^2.0.1", + "ripemd160": "^2.0.0", + "sha.js": "^2.4.0" + } + }, + "node_modules/pbkdf2/node_modules/hash-base": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-2.0.2.tgz", + "integrity": "sha512-0TROgQ1/SxE6KmxWSvXHvRj90/Xo1JvZShofnYF+f6ZsGtR4eES7WfrQzPalmyagfKZCXpVnitiRebZulWsbiw==", + "license": "MIT", + "dependencies": { + "inherits": "^2.0.1" + } + }, + "node_modules/pbkdf2/node_modules/ripemd160": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/ripemd160/-/ripemd160-2.0.1.tgz", + "integrity": "sha512-J7f4wutN8mdbV08MJnXibYpCOPHR+yzy+iQ/AsjMv2j8cLavQ8VGagDFUwwTAdF8FmRKVeNpbTTEwNHCW1g94w==", + "license": "MIT", + "dependencies": { + "hash-base": "^2.0.0", + "inherits": "^2.0.1" + } + }, + "node_modules/perfect-debounce": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", - "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==" + "resolved": "https://registry.npmjs.org/perfect-debounce/-/perfect-debounce-1.0.0.tgz", + "integrity": "sha512-xCy9V055GLEqoFaHoC1SoLIaLmWctgCUaBaWxDZ7/Zx4CTyX7cJQLJOok/orfjZAh9kEYpjJa4d0KcJmCbctZA==" + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": 
"https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==" }, "node_modules/picomatch": { "version": "2.3.1", @@ -7196,21 +8917,76 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, - "node_modules/pkg-types": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-1.0.3.tgz", - "integrity": "sha512-nN7pYi0AQqJnoLPC9eHFQ8AcyaixBUOwvqc5TDnIKCMEE6I0y8P7OKA7fPexsXGCGxQDl/cmrLAp26LhcwxZ4A==", + "node_modules/pinia": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/pinia/-/pinia-3.0.4.tgz", + "integrity": "sha512-l7pqLUFTI/+ESXn6k3nu30ZIzW5E2WZF/LaHJEpoq6ElcLD+wduZoB2kBN19du6K/4FDpPMazY2wJr+IndBtQw==", + "license": "MIT", + "dependencies": { + "@vue/devtools-api": "^7.7.7" + }, + "funding": { + "url": "https://github.com/sponsors/posva" + }, + "peerDependencies": { + "typescript": ">=4.5.0", + "vue": "^3.5.11" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/pinia/node_modules/@vue/devtools-api": { + "version": "7.7.8", + "resolved": "https://registry.npmjs.org/@vue/devtools-api/-/devtools-api-7.7.8.tgz", + "integrity": "sha512-BtFcAmDbtXGwurWUFf8ogIbgZyR+rcVES1TSNEI8Em80fD8Anu+qTRN1Fc3J6vdRHlVM3fzPV1qIo+B4AiqGzw==", + "license": "MIT", + "dependencies": { + "@vue/devtools-kit": "^7.7.8" + } + }, + "node_modules/pkg-dir": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-5.0.0.tgz", + "integrity": "sha512-NPE8TDbzl/3YQYY7CSS228s3g2ollTFnc+Qi3tqmqJp9Vg2ovUpixcJEo2HJScN2Ez+kEaal6y70c0ehqJBJeA==", "dev": true, "dependencies": { - "jsonc-parser": "^3.2.0", - "mlly": "^1.2.0", - "pathe": "^1.1.0" + "find-up": "^5.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/pkg-types": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-1.3.1.tgz", + "integrity": 
"sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==", + "dependencies": { + "confbox": "^0.1.8", + "mlly": "^1.7.4", + "pathe": "^2.0.1" + } + }, + "node_modules/pkg-types/node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==" + }, + "node_modules/possible-typed-array-names": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.1.0.tgz", + "integrity": "sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" } }, "node_modules/postcss": { - "version": "8.4.32", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.32.tgz", - "integrity": "sha512-D/kj5JNu6oo2EIy+XL/26JEDTlIbB8hw85G8StOE6L74RQAVVP5rej6wxCNqyMbR4RkPfqvezVbPw81Ngd6Kcw==", + "version": "8.4.49", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.49.tgz", + "integrity": "sha512-OCVPnIObs4N29kxTjzLfUryOkvZEq+pf8jTF0lg8E7uETuWHA+v7j3c/xJmiqpX450191LlmZfUKkXxkTry7nA==", "funding": [ { "type": "opencollective", @@ -7227,17 +9003,18 @@ ], "dependencies": { "nanoid": "^3.3.7", - "picocolors": "^1.0.0", - "source-map-js": "^1.0.2" + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" }, "engines": { "node": "^10 || ^12 || >=14" } }, "node_modules/postcss-selector-parser": { - "version": "6.0.13", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.13.tgz", - "integrity": "sha512-EaV1Gl4mUEV4ddhDnv/xtj7sxwrwxdetHdWUGnT4VJQf+4d05v6lHYZr8N573k5Z0BViss7BDhfWtKS3+sfAqQ==", + "version": "6.0.15", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.15.tgz", + "integrity": 
"sha512-rEYkQOMUCEMhsKbK66tbEU9QVIxbhN18YiniAwA7XQYTVBqrBy+P2p5JcdqsHgKM2zWylp8d7J6eszocfds5Sw==", + "dev": true, "dependencies": { "cssesc": "^3.0.0", "util-deprecate": "^1.0.2" @@ -7250,48 +9027,39 @@ "version": "1.2.1", "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, "engines": { "node": ">= 0.8.0" } }, - "node_modules/pretty-format": { - "version": "27.5.1", - "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-27.5.1.tgz", - "integrity": "sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ==", + "node_modules/prettier": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.6.2.tgz", + "integrity": "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ==", "dev": true, - "dependencies": { - "ansi-regex": "^5.0.1", - "ansi-styles": "^5.0.0", - "react-is": "^17.0.1" + "license": "MIT", + "peer": true, + "bin": { + "prettier": "bin/prettier.cjs" }, "engines": { - "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" - } - }, - "node_modules/pretty-format/node_modules/ansi-styles": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", - "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", - "dev": true, - "engines": { - "node": ">=10" + "node": ">=14" }, "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" + "url": "https://github.com/prettier/prettier?sponsor=1" } }, - "node_modules/pretty-format/node_modules/react-is": { - "version": "17.0.2", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz", - "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==", - "dev": true - }, - 
"node_modules/private": { - "version": "0.1.8", - "resolved": "https://registry.npmjs.org/private/-/private-0.1.8.tgz", - "integrity": "sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg==", + "node_modules/prettier-linter-helpers": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/prettier-linter-helpers/-/prettier-linter-helpers-1.0.1.tgz", + "integrity": "sha512-SxToR7P8Y2lWmv/kTzVLC1t/GDI2WGjMwNhLLE9qtH8Q13C+aEmuRlzDst4Up4s0Wc8sF2M+J57iB3cMLqftfg==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-diff": "^1.1.2" + }, "engines": { - "node": ">= 0.6" + "node": ">=6.0.0" } }, "node_modules/process": { @@ -7303,24 +9071,6 @@ "node": ">= 0.6.0" } }, - "node_modules/promise": { - "version": "7.3.1", - "resolved": "https://registry.npmjs.org/promise/-/promise-7.3.1.tgz", - "integrity": "sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==", - "dependencies": { - "asap": "~2.0.3" - } - }, - "node_modules/prop-types": { - "version": "15.8.1", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", - "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", - "dependencies": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.13.1" - } - }, "node_modules/property-expr": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/property-expr/-/property-expr-2.0.5.tgz", @@ -7350,12 +9100,6 @@ "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==" }, - "node_modules/psl": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/psl/-/psl-1.9.0.tgz", - "integrity": "sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag==", - "dev": true - }, 
"node_modules/public-encrypt": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/public-encrypt/-/public-encrypt-4.0.3.tgz", @@ -7375,36 +9119,37 @@ "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==" }, "node_modules/punycode": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz", - "integrity": "sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==", + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "dev": true, "engines": { "node": ">=6" } }, - "node_modules/q": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/q/-/q-1.5.1.tgz", - "integrity": "sha512-kV/CThkXo6xyFEZUugw/+pIOywXcDbFYgSct5cT3gqlbkBE1SJdwy6UQoZvodiWF/ckQLZyDE/Bu1M6gVu5lVw==", + "node_modules/punycode.js": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode.js/-/punycode.js-2.3.1.tgz", + "integrity": "sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==", + "license": "MIT", "engines": { - "node": ">=0.6.0", - "teleport": ">=0.2.0" + "node": ">=6" } }, "node_modules/qrcode.vue": { - "version": "3.4.1", - "resolved": "https://registry.npmjs.org/qrcode.vue/-/qrcode.vue-3.4.1.tgz", - "integrity": "sha512-wq/zHsifH4FJ1GXQi8/wNxD1KfQkckIpjK1KPTc/qwYU5/Bkd4me0w4xZSg6EXk6xLBkVDE0zxVagewv5EMAVA==", + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/qrcode.vue/-/qrcode.vue-3.6.0.tgz", + "integrity": "sha512-vQcl2fyHYHMjDO1GguCldJxepq2izQjBkDEEu9NENgfVKP6mv/e2SU62WbqYHGwTgWXLhxZ1NCD1dAZKHQq1fg==", "peerDependencies": { "vue": "^3.0.0" } }, "node_modules/qs": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", - "integrity": 
"sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", + "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", "dependencies": { - "side-channel": "^1.0.4" + "side-channel": "^1.0.6" }, "engines": { "node": ">=0.6" @@ -7413,11 +9158,14 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/querystringify": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", - "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==", - "dev": true + "node_modules/querystring-es3": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/querystring-es3/-/querystring-es3-0.2.1.tgz", + "integrity": "sha512-773xhDQnZBMFobEiztv8LIl70ch5MSF/jUQVlhwFyBILqq96anmoctVIYz+ZRp0qbCKATTn6ev02M3r7Ga5vqA==", + "dev": true, + "engines": { + "node": ">=0.4.x" + } }, "node_modules/queue-microtask": { "version": "1.2.3", @@ -7464,9 +9212,9 @@ } }, "node_modules/raw-body": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz", - "integrity": "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==", + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", + "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", "dependencies": { "bytes": "3.1.2", "http-errors": "2.0.0", @@ -7477,46 +9225,13 @@ "node": ">= 0.8" } }, - "node_modules/react": { - "version": "0.14.10", - "resolved": "https://registry.npmjs.org/react/-/react-0.14.10.tgz", - "integrity": "sha512-yxMw5aorZG4qsLVBfjae4wGFvd5708DhcxaXLJ3IOTgr1TCs8k9+ZheGgLGr5OfwWMhSahNbGvvoEDzrxVWouA==", - "dependencies": { - "envify": "^3.0.0", - "fbjs": "^0.6.1" - }, - 
"engines": { - "node": ">=0.10.0" - } - }, - "node_modules/react-dom": { - "version": "0.14.10", - "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-0.14.10.tgz", - "integrity": "sha512-kDs8SWFb8Sry4NAplhpJbZEeAnTPir/m+s9s+lkdqA2a89BzmWGnEgGG/CfmhULjv1ogc4oHrjMfAvFNruT3jQ==", - "peerDependencies": { - "react": "^0.14.10" - } - }, - "node_modules/react-is": { - "version": "16.13.1", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", - "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" - }, - "node_modules/react-redux": { - "version": "4.4.10", - "resolved": "https://registry.npmjs.org/react-redux/-/react-redux-4.4.10.tgz", - "integrity": "sha512-tjL0Bmpkj75Td0k+lXlF8Fc8a9GuXFv/3ahUOCXExWs/jhsKiQeTffdH0j5byejCGCRL4tvGFYlrwBF1X/Aujg==", + "node_modules/rc9": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/rc9/-/rc9-2.1.2.tgz", + "integrity": "sha512-btXCnMmRIBINM2LDZoEmOogIZU7Qe7zn4BpomSKZ/ykbLObuBdvG+mFq11DL6fjH1DRwHhrlgtYWG96bJiC7Cg==", "dependencies": { - "create-react-class": "^15.5.1", - "hoist-non-react-statics": "^3.3.0", - "invariant": "^2.0.0", - "lodash": "^4.17.11", - "loose-envify": "^1.4.0", - "prop-types": "^15.7.2" - }, - "peerDependencies": { - "react": "^0.14.0 || ^15.0.0-0 || ^15.4.0-0 || ^16.0.0-0", - "redux": "^2.0.0 || ^3.0.0" + "defu": "^6.1.4", + "destr": "^2.0.3" } }, "node_modules/readable-stream": { @@ -7533,48 +9248,15 @@ } }, "node_modules/readdirp": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", - "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", - "dependencies": { - "picomatch": "^2.2.1" - }, - "engines": { - "node": ">=8.10.0" - } - }, - "node_modules/recast": { - "version": "0.11.23", - "resolved": "https://registry.npmjs.org/recast/-/recast-0.11.23.tgz", - "integrity": 
"sha512-+nixG+3NugceyR8O1bLU45qs84JgI3+8EauyRZafLgC9XbdAOIVgwV1Pe2da0YzGo62KzWoZwUpVEQf6qNAXWA==", - "dependencies": { - "ast-types": "0.9.6", - "esprima": "~3.1.0", - "private": "~0.1.5", - "source-map": "~0.5.0" - }, + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.0.2.tgz", + "integrity": "sha512-yDMz9g+VaZkqBYS/ozoBJwaBhTbZo3UNYQHNRw1D3UFQB8oHB4uS/tAODO+ZLjGWmUbKnIlOWO+aaIiAxrUWHA==", "engines": { - "node": ">= 0.8" - } - }, - "node_modules/recast/node_modules/esprima": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-3.1.3.tgz", - "integrity": "sha512-AWwVMNxwhN8+NIPQzAQZCm7RkLC4RbM3B1OobMuyp3i+w73X57KCKaVIxaRZb+DYCojq7rspo+fmuQfAboyhFg==", - "bin": { - "esparse": "bin/esparse.js", - "esvalidate": "bin/esvalidate.js" + "node": ">= 14.16.0" }, - "engines": { - "node": ">=4" - } - }, - "node_modules/recast/node_modules/source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==", - "engines": { - "node": ">=0.10.0" + "funding": { + "type": "individual", + "url": "https://paulmillr.com/funding/" } }, "node_modules/rechoir": { @@ -7589,30 +9271,49 @@ "node": ">= 0.10" } }, - "node_modules/redux": { - "version": "3.7.2", - "resolved": "https://registry.npmjs.org/redux/-/redux-3.7.2.tgz", - "integrity": "sha512-pNqnf9q1hI5HHZRBkj3bAngGZW/JMCmexDlOxw4XagXY2o1327nHH54LoTjiPJ0gizoqPDRqWyX/00g0hD6w+A==", + "node_modules/reflect.getprototypeof": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.10.tgz", + "integrity": "sha512-00o4I+DVrefhv+nX0ulyi3biSHCPDe+yLv5o/p6d/UVlirijB8E16FtfwSAi4g3tcqrQ4lRAqQSoFEZJehYEcw==", + "dev": true, + "license": "MIT", "dependencies": { - "lodash": "^4.2.1", - "lodash-es": "^4.2.1", - "loose-envify": "^1.1.0", - "symbol-observable": 
"^1.0.3" + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.9", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.7", + "get-proto": "^1.0.1", + "which-builtin-type": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/regenerator-runtime": { - "version": "0.13.11", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz", - "integrity": "sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==" + "version": "0.14.1", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz", + "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==", + "dev": true, + "license": "MIT" }, "node_modules/regexp.prototype.flags": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.0.tgz", - "integrity": "sha512-0SutC3pNudRKgquxGoRGIz946MZVHqbNfPjBdxeOhBrdgDKlRoXmYLQN9xRbrR09ZXWeGAdPuif7egofn6v5LA==", + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.4.tgz", + "integrity": "sha512-dYqgNSZbDwkaJ2ceRd9ojCGjBq+mOm9LmtXnAnEGyHhN/5R7iDW2TRw3h+o/jCFxus3P2LfWIIiwowAjANm7IA==", + "dev": true, + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "functions-have-names": "^1.2.3" + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-errors": "^1.3.0", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "set-function-name": "^2.0.2" }, "engines": { "node": ">= 0.4" @@ -7621,21 +9322,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/require-directory": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", - "integrity": 
"sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/requires-port": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", - "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==", - "dev": true - }, "node_modules/resize-observer-polyfill": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/resize-observer-polyfill/-/resize-observer-polyfill-1.5.1.tgz", @@ -7643,17 +9329,22 @@ "dev": true }, "node_modules/resolve": { - "version": "1.22.2", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.2.tgz", - "integrity": "sha512-Sb+mjNHOULsBv818T40qSPeRiuWLyaGMa5ewydRLFimneixmVy2zdivRl+AF6jaYPC8ERxGDmFSiqui6SfPd+g==", + "version": "1.22.11", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz", + "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==", + "dev": true, + "license": "MIT", "dependencies": { - "is-core-module": "^2.11.0", + "is-core-module": "^2.16.1", "path-parse": "^1.0.7", "supports-preserve-symlinks-flag": "^1.0.0" }, "bin": { "resolve": "bin/resolve" }, + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -7662,6 +9353,8 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "license": "MIT", "engines": { "node": ">=4" } @@ -7675,38 +9368,11 @@ "node": ">=0.10.0" } }, - "node_modules/rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - 
"dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/rimraf/node_modules/glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } + "node_modules/rfdc": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.4.1.tgz", + "integrity": "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==", + "license": "MIT" }, "node_modules/ripemd160": { "version": "2.0.2", @@ -7718,10 +9384,11 @@ } }, "node_modules/rollup": { - "version": "3.29.4", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.29.4.tgz", - "integrity": "sha512-oWzmBZwvYrU0iJHtDmhsm662rC15FRXmcjCk1xD771dFDx5jJ02ufAQQTn0etB2emNk4J9EZg/yWKpsn9BWGRw==", + "version": "3.29.5", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.29.5.tgz", + "integrity": "sha512-GVsDdsbJzzy4S/v3dqWPJ7EfvZJfCHiDqe80IyrF59LYuP+e6U1LJoUqeuqRbwAWoMNoXivMNeNAOf5E22VA1w==", "devOptional": true, + "peer": true, "bin": { "rollup": "dist/bin/rollup" }, @@ -7746,9 +9413,9 @@ } }, "node_modules/rrweb-cssom": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/rrweb-cssom/-/rrweb-cssom-0.6.0.tgz", - "integrity": "sha512-APM0Gt1KoXBz0iIkkdB/kfvGOwC4UuJFeG/c+yV7wSc7q96cG/kJ0HiYCnzivD9SB53cLV1MlHFNfOuPaadYSw==", + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/rrweb-cssom/-/rrweb-cssom-0.8.0.tgz", + "integrity": "sha512-guoltQEx+9aMf2gDZ0s62EcV8lsXR+0w8915TC3ITdn2YueuNjdAYh/levpU9nFaoChh9RUS5ZdQMrKfVEN9tw==", 
"dev": true }, "node_modules/run-parallel": { @@ -7773,6 +9440,26 @@ "queue-microtask": "^1.2.2" } }, + "node_modules/safe-array-concat": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.3.tgz", + "integrity": "sha512-AURm5f0jYEOydBj7VQlVvDrjeFgthDdEF5H1dP+6mNpoXOMo1quQqJ4wvJDyRZ9+pO3kGWoOdmV08cSv2aJV6Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.2", + "get-intrinsic": "^1.2.6", + "has-symbols": "^1.1.0", + "isarray": "^2.0.5" + }, + "engines": { + "node": ">=0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", @@ -7792,14 +9479,36 @@ } ] }, - "node_modules/safe-regex-test": { + "node_modules/safe-push-apply": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.0.tgz", - "integrity": "sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==", + "resolved": "https://registry.npmjs.org/safe-push-apply/-/safe-push-apply-1.0.0.tgz", + "integrity": "sha512-iKE9w/Z7xCzUMIZqdBsp6pEQvwuEebH4vdpjcDWnyzaI6yl6O9FHvVpmGelvEHNsoY6wGblkxR6Zty/h00WiSA==", + "dev": true, + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.3", - "is-regex": "^1.1.4" + "es-errors": "^1.3.0", + "isarray": "^2.0.5" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/safe-regex-test": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.1.0.tgz", + "integrity": "sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "is-regex": "^1.2.1" 
+ }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -7811,12 +9520,13 @@ "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, "node_modules/sass": { - "version": "1.69.7", - "resolved": "https://registry.npmjs.org/sass/-/sass-1.69.7.tgz", - "integrity": "sha512-rzj2soDeZ8wtE2egyLXgOOHQvaC2iosZrkF6v3EUG+tBwEvhqUCzm0VP3k9gHF9LXbSrRhT5SksoI56Iw8NPnQ==", + "version": "1.97.3", + "resolved": "https://registry.npmjs.org/sass/-/sass-1.97.3.tgz", + "integrity": "sha512-fDz1zJpd5GycprAbu4Q2PV/RprsRtKC/0z82z0JLgdytmcq0+ujJbJ/09bPGDxCLkKY3Np5cRAOcWiVkLXJURg==", + "license": "MIT", "dependencies": { - "chokidar": ">=3.0.0 <4.0.0", - "immutable": "^4.0.0", + "chokidar": "^4.0.0", + "immutable": "^5.0.2", "source-map-js": ">=0.6.2 <2.0.0" }, "bin": { @@ -7824,6 +9534,9 @@ }, "engines": { "node": ">=14.0.0" + }, + "optionalDependencies": { + "@parcel/watcher": "^2.4.1" } }, "node_modules/saxes": { @@ -7839,9 +9552,9 @@ } }, "node_modules/schema-utils": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.2.tgz", - "integrity": "sha512-pvjEHOgWc9OWA/f/DE3ohBWTD6EleVLf7iFUkoSwAxttdBhB9QUebQgxER2kWueOvRJXPHNnyrvvh9eZINB8Eg==", + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz", + "integrity": "sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg==", "dev": true, "dependencies": { "@types/json-schema": "^7.0.8", @@ -7856,10 +9569,16 @@ "url": "https://opencollective.com/webpack" } }, + "node_modules/scule": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/scule/-/scule-1.3.0.tgz", + "integrity": "sha512-6FtHJEvt+pVMIB9IBY+IcCJ6Z5f1iQnytgyfKMhDKgmzYG+TeH/wx1y3l27rshSbLiSanrR9ffZDrEsmjlQF2g==" + }, "node_modules/section-matter": { "version": "1.0.0", "resolved": 
"https://registry.npmjs.org/section-matter/-/section-matter-1.0.0.tgz", "integrity": "sha512-vfD3pmTzGpufjScBh50YHKzEu2lxBWhVEHsNGoEXmCmn2hKGfeNLYMzCJpe8cD7gqX7TJluOVpBkAequ6dgMmA==", + "license": "MIT", "dependencies": { "extend-shallow": "^2.0.1", "kind-of": "^6.0.0" @@ -7869,12 +9588,10 @@ } }, "node_modules/semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", - "dependencies": { - "lru-cache": "^6.0.0" - }, + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "license": "ISC", "bin": { "semver": "bin/semver.js" }, @@ -7882,26 +9599,10 @@ "node": ">=10" } }, - "node_modules/semver/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/semver/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - }, "node_modules/send": { - "version": "0.18.0", - "resolved": "https://registry.npmjs.org/send/-/send-0.18.0.tgz", - "integrity": "sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==", + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", + "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", "dependencies": { "debug": "2.6.9", "depd": "2.0.0", @@ -7934,56 
+9635,143 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, - "node_modules/send/node_modules/ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + "node_modules/send/node_modules/encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "engines": { + "node": ">= 0.8" + } }, "node_modules/serialize-javascript": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.1.tgz", - "integrity": "sha512-owoXEFjWRllis8/M1Q+Cw5k8ZH40e3zhp/ovX+Xr/vi1qj6QesbyXXViFbpNvWvPNAD62SutwEXavefrLJWj7w==", + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.2.tgz", + "integrity": "sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==", "dev": true, "peer": true, "dependencies": { "randombytes": "^2.1.0" } }, + "node_modules/seroval": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/seroval/-/seroval-1.4.2.tgz", + "integrity": "sha512-N3HEHRCZYn3cQbsC4B5ldj9j+tHdf4JZoYPlcI4rRYu0Xy4qN8MQf1Z08EibzB0WpgRG5BGK08FTrmM66eSzKQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/seroval-plugins": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/seroval-plugins/-/seroval-plugins-1.2.1.tgz", + "integrity": "sha512-H5vs53+39+x4Udwp4J5rNZfgFuA+Lt+uU+09w1gYBVWomtAl98B+E9w7yC05Xc81/HgLvJdlyqJbU0fJCKCmdw==", + "dev": true, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "seroval": "^1.0" + } + }, 
"node_modules/serve-static": { - "version": "1.15.0", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.15.0.tgz", - "integrity": "sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==", + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz", + "integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==", "dependencies": { - "encodeurl": "~1.0.2", + "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "parseurl": "~1.3.3", - "send": "0.18.0" + "send": "0.19.0" }, "engines": { "node": ">= 0.8.0" } }, + "node_modules/set-function-length": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/set-function-name": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz", + "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "functions-have-names": "^1.2.3", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/set-proto": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/set-proto/-/set-proto-1.0.0.tgz", + "integrity": "sha512-RJRdvCo6IAnPdsvP/7m6bsQqNnn1FCBX5ZNtFL98MmFF/4xAIJTIg1YbHW5DC2W5SKZanrC6i4HsJqlajw/dZw==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"dunder-proto": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/setimmediate": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", + "integrity": "sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==", + "dev": true + }, "node_modules/setprototypeof": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" }, "node_modules/sha.js": { - "version": "2.4.11", - "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz", - "integrity": "sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==", + "version": "2.4.12", + "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.4.12.tgz", + "integrity": "sha512-8LzC5+bvI45BjpfXU8V5fdU2mfeKiQe1D1gIMn7XUlF3OTUrpdJpPPH4EMAnF0DsHHdSZqCdSss5qCmJKuiO3w==", + "license": "(MIT AND BSD-3-Clause)", "dependencies": { - "inherits": "^2.0.1", - "safe-buffer": "^5.0.1" + "inherits": "^2.0.4", + "safe-buffer": "^5.2.1", + "to-buffer": "^1.2.0" }, "bin": { "sha.js": "bin.js" + }, + "engines": { + "node": ">= 0.10" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/shebang-command": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, "dependencies": { "shebang-regex": "^3.0.0" }, @@ -7995,6 +9783,7 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, "engines": { 
"node": ">=8" } @@ -8005,45 +9794,104 @@ "integrity": "sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==", "dev": true, "dependencies": { - "glob": "^7.0.0", - "interpret": "^1.0.0", - "rechoir": "^0.6.2" - }, - "bin": { - "shjs": "bin/shjs" + "glob": "^7.0.0", + "interpret": "^1.0.0", + "rechoir": "^0.6.2" + }, + "bin": { + "shjs": "bin/shjs" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/shelljs/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/side-channel": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" }, "engines": { - "node": ">=4" + "node": ">= 0.4" + }, + "funding": { + "url": 
"https://github.com/sponsors/ljharb" } }, - "node_modules/shelljs/node_modules/glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "dev": true, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "license": "MIT", "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" }, "engines": { - "node": "*" + "node": ">= 0.4" }, "funding": { - "url": "https://github.com/sponsors/isaacs" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/side-channel": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "license": "MIT", "dependencies": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -8055,32 +9903,47 @@ "integrity": 
"sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==", "dev": true }, - "node_modules/signal-exit": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", - "dev": true - }, "node_modules/sirv": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/sirv/-/sirv-2.0.3.tgz", - "integrity": "sha512-O9jm9BsID1P+0HOi81VpXPoDxYP374pkOLzACAoyUQ/3OUVndNpsz6wMnY2z+yOxzbllCKZrM+9QrWsv4THnyA==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/sirv/-/sirv-3.0.0.tgz", + "integrity": "sha512-BPwJGUeDaDCHihkORDchNyyTvWFhcusy1XMmhEVTQTwGeybFbp8YEmB+njbPnth1FibULBSBVwCQni25XlCUDg==", "dev": true, "dependencies": { - "@polka/url": "^1.0.0-next.20", - "mrmime": "^1.0.0", + "@polka/url": "^1.0.0-next.24", + "mrmime": "^2.0.0", "totalist": "^3.0.0" }, "engines": { - "node": ">= 10" + "node": ">=18" } }, - "node_modules/slash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", - "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "node_modules/solid-js": { + "version": "1.9.5", + "resolved": "https://registry.npmjs.org/solid-js/-/solid-js-1.9.5.tgz", + "integrity": "sha512-ogI3DaFcyn6UhYhrgcyRAMbu/buBJitYQASZz5WzfQVPP10RD2AbCoRZ517psnezrasyCbWzIxZ6kVqet768xw==", + "dev": true, + "dependencies": { + "csstype": "^3.1.0", + "seroval": "^1.1.0", + "seroval-plugins": "^1.1.0" + } + }, + "node_modules/solid-transition-group": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/solid-transition-group/-/solid-transition-group-0.2.3.tgz", + "integrity": "sha512-iB72c9N5Kz9ykRqIXl0lQohOau4t0dhel9kjwFvx81UZJbVwaChMuBuyhiZmK24b8aKEK0w3uFM96ZxzcyZGdg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@solid-primitives/refs": "^1.0.5", + 
"@solid-primitives/transition-group": "^1.0.2" + }, "engines": { - "node": ">=8" + "node": ">=18.0.0", + "pnpm": ">=8.6.0" + }, + "peerDependencies": { + "solid-js": "^1.6.12" } }, "node_modules/source-map": { @@ -8093,9 +9956,9 @@ } }, "node_modules/source-map-js": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz", - "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", "engines": { "node": ">=0.10.0" } @@ -8110,10 +9973,19 @@ "source-map": "^0.6.0" } }, + "node_modules/speakingurl": { + "version": "14.0.1", + "resolved": "https://registry.npmjs.org/speakingurl/-/speakingurl-14.0.1.tgz", + "integrity": "sha512-1POYv7uv2gXoyGFpBCmpDVSNV74IfsWlDW216UPjbWufNf+bSU6GdbDsxdcxtfwb4xlI3yxzOTKClUosxARYrQ==", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/sprintf-js": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", - "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==" + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", + "license": "BSD-3-Clause" }, "node_modules/sshpk": { "version": "1.18.0", @@ -8154,22 +10026,46 @@ } }, "node_modules/std-env": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.3.3.tgz", - "integrity": "sha512-Rz6yejtVyWnVjC1RFvNmYL10kgjC49EOghxWn0RFqlCHGFpQx+Xe7yW3I4ceK1SGrWIGMjD5Kbue8W/udkbMJg==", - "dev": true + "version": "3.8.0", + "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.8.0.tgz", + "integrity": 
"sha512-Bc3YwwCB+OzldMxOXJIIvC6cPRWr/LxOp48CdQTOkPyk/t4JWWJbrilwBd7RJzKV8QW7tJkcgAmeuLLJugl5/w==" }, "node_modules/stop-iteration-iterator": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.0.0.tgz", - "integrity": "sha512-iCGQj+0l0HOdZ2AEeBADlsRC+vsnDsZsbdSiH1yNSjcfKM7fdpCMfqAL/dwF5BLiw/XhRft/Wax6zQbhq2BcjQ==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.1.0.tgz", + "integrity": "sha512-eLoXW/DHyl62zxY4SCaIgnRhuMr6ri4juEYARS8E6sCEqzKpOiE521Ucofdx+KnDZl5xmvGYaaKCk5FEOxJCoQ==", + "dev": true, + "license": "MIT", "dependencies": { - "internal-slot": "^1.0.4" + "es-errors": "^1.3.0", + "internal-slot": "^1.1.0" }, "engines": { "node": ">= 0.4" } }, + "node_modules/stream-browserify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/stream-browserify/-/stream-browserify-3.0.0.tgz", + "integrity": "sha512-H73RAHsVBapbim0tU2JwwOiXUj+fikfiaoYAKHF3VJfA0pe2BCzkhAHBlLG6REzE+2WNZcxOXjK7lkso+9euLA==", + "dev": true, + "dependencies": { + "inherits": "~2.0.4", + "readable-stream": "^3.5.0" + } + }, + "node_modules/stream-http": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/stream-http/-/stream-http-3.2.0.tgz", + "integrity": "sha512-Oq1bLqisTyK3TSCXpPbT4sdeYNdmyZJv1LxpEm2vu1ZhK89kSE5YXwZc3cWk0MagGaKriBh9mCFbVGtO+vY29A==", + "dev": true, + "dependencies": { + "builtin-status-codes": "^3.0.0", + "inherits": "^2.0.4", + "readable-stream": "^3.6.0", + "xtend": "^4.0.2" + } + }, "node_modules/string_decoder": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", @@ -8192,38 +10088,47 @@ "node": ">=8" } }, - "node_modules/string-width/node_modules/emoji-regex": { + "node_modules/string-width-cjs": { + "name": "string-width", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": 
"sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs/node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", "dev": true }, - "node_modules/string.prototype.matchall": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.8.tgz", - "integrity": "sha512-6zOCOcJ+RJAQshcTvXPHoxoQGONa3e/Lqx90wUA+wEzX78sg5Bo+1tQo4N0pohS0erG9qtCqJDjNCQBjeWVxyg==", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "get-intrinsic": "^1.1.3", - "has-symbols": "^1.0.3", - "internal-slot": "^1.0.3", - "regexp.prototype.flags": "^1.4.3", - "side-channel": "^1.0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } + "node_modules/string-width/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true }, "node_modules/string.prototype.trim": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.7.tgz", - "integrity": "sha512-p6TmeT1T3411M8Cgg9wBTMRtY2q9+PNy9EV1i2lIXUN/btt763oIfxwN3RR8VU6wHX8j/1CFy0L+YuThm6bgOg==", + "version": "1.2.10", + "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.10.tgz", + "integrity": "sha512-Rs66F0P/1kedk5lyYyH9uBzuiI/kNRmwJAR9quK6VOtIpZ2G+hMZd+HQbbv25MgCA6gEffoMZYxlTod4WcdrKA==", + "dev": true, + 
"license": "MIT", "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "call-bind": "^1.0.8", + "call-bound": "^1.0.2", + "define-data-property": "^1.1.4", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.5", + "es-object-atoms": "^1.0.0", + "has-property-descriptors": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -8233,26 +10138,37 @@ } }, "node_modules/string.prototype.trimend": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.6.tgz", - "integrity": "sha512-JySq+4mrPf9EsDBEDYMOb/lM7XQLulwg5R/m1r0PXEFqrV0qHvl58sdTilSXtKOflCsK2E8jxf+GKC0T07RWwQ==", + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.9.tgz", + "integrity": "sha512-G7Ok5C6E/j4SGfyLCloXTrngQIQU3PWtXGst3yM7Bea9FRURf1S42ZHlZZtsNque2FN2PoUhfZXYLNWwEr4dLQ==", + "dev": true, + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "call-bind": "^1.0.8", + "call-bound": "^1.0.2", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/string.prototype.trimstart": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.6.tgz", - "integrity": "sha512-omqjMDaY92pbn5HOX7f9IccLA+U1tA9GvtU4JrodiXFfYB7jPzzHpRzpglLAjtUV6bB557zwClJezTqnAiYnQA==", + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.8.tgz", + "integrity": "sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==", + "dev": true, + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "call-bind": "^1.0.7", + 
"define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -8262,6 +10178,20 @@ "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, "dependencies": { "ansi-regex": "^5.0.1" }, @@ -8273,6 +10203,8 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", + "dev": true, + "license": "MIT", "engines": { "node": ">=4" } @@ -8281,20 +10213,7 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/strip-bom-string/-/strip-bom-string-1.0.0.tgz", "integrity": "sha512-uCC2VHvQRYu+lMh4My/sFNmF2klFymLX1wHJeXnbEJERpV/ZsVuonzerjfrGpIGF7LBVa1O7i9kjiWvJiFck8g==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/strip-indent": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-1.0.1.tgz", - "integrity": "sha512-I5iQq6aFMM62fBEAIB/hXzwJD6EEZ0xEGCX2t7oXqaKPIRgt4WruAQ285BISgdkP+HLGWyeGmNJcpIwFeRYRUA==", - "dependencies": { - "get-stdin": "^4.0.1" - }, - "bin": { - "strip-indent": "cli.js" - }, + "license": "MIT", "engines": { "node": ">=0.10.0" } @@ -8303,6 +10222,8 @@ "version": "3.1.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", "integrity": 
"sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "license": "MIT", "engines": { "node": ">=8" }, @@ -8311,21 +10232,38 @@ } }, "node_modules/strip-literal": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/strip-literal/-/strip-literal-1.0.1.tgz", - "integrity": "sha512-QZTsipNpa2Ppr6v1AmJHESqJ3Uz247MUS0OjrnnZjFAvEoWqxuyFuXn2xLgMtRnijJShAa1HL0gtJyUs7u7n3Q==", - "dev": true, + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-literal/-/strip-literal-3.0.0.tgz", + "integrity": "sha512-TcccoMhJOM3OebGhSBEmp3UZ2SfDMZUEBdRA/9ynfLi8yYajyWX3JiXArcJt4Umh4vISpspkQIY8ZZoCqjbviA==", "dependencies": { - "acorn": "^8.8.2" + "js-tokens": "^9.0.1" }, "funding": { "url": "https://github.com/sponsors/antfu" } }, + "node_modules/strip-literal/node_modules/js-tokens": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-9.0.1.tgz", + "integrity": "sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ==" + }, + "node_modules/superjson": { + "version": "2.2.5", + "resolved": "https://registry.npmjs.org/superjson/-/superjson-2.2.5.tgz", + "integrity": "sha512-zWPTX96LVsA/eVYnqOM2+ofcdPqdS1dAF1LN4TS2/MWuUpfitd9ctTa87wt4xrYnZnkLtS69xpBdSxVBP5Rm6w==", + "license": "MIT", + "dependencies": { + "copy-anything": "^4" + }, + "engines": { + "node": ">=16" + } + }, "node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, "dependencies": { "has-flag": "^4.0.0" }, @@ -8337,6 +10275,7 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", "integrity": 
"sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, "engines": { "node": ">= 0.4" }, @@ -8344,20 +10283,28 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/symbol-observable": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/symbol-observable/-/symbol-observable-1.2.0.tgz", - "integrity": "sha512-e900nM8RRtGhlV36KGEU9k65K3mPb1WV70OdjfxlG2EAuM1noi/E/BaW/uMhL7bPEssK8QV57vN3esixjUvcXQ==", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/symbol-tree": { "version": "3.2.4", "resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz", "integrity": "sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==", "dev": true }, + "node_modules/synckit": { + "version": "0.11.12", + "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.11.12.tgz", + "integrity": "sha512-Bh7QjT8/SuKUIfObSXNHNSK6WHo6J1tHCqJsuaFDP7gP0fkzSfTxI8y85JrppZ0h8l0maIgc2tfuZQ6/t3GtnQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@pkgr/core": "^0.2.9" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/synckit" + } + }, "node_modules/tapable": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz", @@ -8368,14 +10315,42 @@ "node": ">=6" } }, + "node_modules/tar": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", + "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", + "dependencies": { + "chownr": "^2.0.0", + "fs-minipass": "^2.0.0", + "minipass": "^5.0.0", + "minizlib": "^2.1.1", + "mkdirp": "^1.0.3", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/tar/node_modules/mkdirp": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": 
"sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "bin": { + "mkdirp": "bin/cmd.js" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/terser": { - "version": "5.27.0", - "resolved": "https://registry.npmjs.org/terser/-/terser-5.27.0.tgz", - "integrity": "sha512-bi1HRwVRskAjheeYl291n3JC4GgO/Ty4z1nVs5AAsmonJulGxpSektecnNedrwK9C7vpvVtcX3cw00VSLt7U2A==", + "version": "5.46.0", + "resolved": "https://registry.npmjs.org/terser/-/terser-5.46.0.tgz", + "integrity": "sha512-jTwoImyr/QbOWFFso3YoU3ik0jBBDJ6JTOQiy/J2YxVJdZCc+5u7skhNwiOR3FQIygFqVUPHl7qbbxtjW2K3Qg==", "devOptional": true, + "license": "BSD-2-Clause", "dependencies": { "@jridgewell/source-map": "^0.3.3", - "acorn": "^8.8.2", + "acorn": "^8.15.0", "commander": "^2.20.0", "source-map-support": "~0.5.20" }, @@ -8387,17 +10362,17 @@ } }, "node_modules/terser-webpack-plugin": { - "version": "5.3.9", - "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.9.tgz", - "integrity": "sha512-ZuXsqE07EcggTWQjXUj+Aot/OMcD0bMKGgF63f7UxYcu5/AJF53aIpK1YoP5xR9l6s/Hy2b+t1AM0bLNPRuhwA==", + "version": "5.3.10", + "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.10.tgz", + "integrity": "sha512-BKFPWlPDndPs+NGGCr1U59t0XScL5317Y0UReNrHaw9/FwhPENlq6bfgs+4yPfyP51vqC1bQ4rp1EfXW5ZSH9w==", "dev": true, "peer": true, "dependencies": { - "@jridgewell/trace-mapping": "^0.3.17", + "@jridgewell/trace-mapping": "^0.3.20", "jest-worker": "^27.4.5", "schema-utils": "^3.1.1", "serialize-javascript": "^6.0.1", - "terser": "^5.16.8" + "terser": "^5.26.0" }, "engines": { "node": ">= 10.13.0" @@ -8421,96 +10396,141 @@ } } }, - "node_modules/test-exclude": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", - "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", + "node_modules/timers-browserify": { + 
"version": "2.0.12", + "resolved": "https://registry.npmjs.org/timers-browserify/-/timers-browserify-2.0.12.tgz", + "integrity": "sha512-9phl76Cqm6FhSX9Xe1ZUAMLtm1BLkKj2Qd5ApyWkXzsMRaA7dgr81kf4wJmQf/hAvg8EEyJxDo3du/0KlhPiKQ==", "dev": true, "dependencies": { - "@istanbuljs/schema": "^0.1.2", - "glob": "^7.1.4", - "minimatch": "^3.0.4" + "setimmediate": "^1.0.4" }, "engines": { - "node": ">=8" + "node": ">=0.6.0" } }, - "node_modules/test-exclude/node_modules/glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "node_modules/tiny-case": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/tiny-case/-/tiny-case-1.0.3.tgz", + "integrity": "sha512-Eet/eeMhkO6TX8mnUteS9zgPbUMQa4I6Kkp5ORiBD5476/m+PIRiumP5tmh5ioJpH7k51Kehawy2UDfsnxxY8Q==", + "dev": true + }, + "node_modules/tinybench": { + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz", + "integrity": "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==", + "dev": true + }, + "node_modules/tinyexec": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-0.3.2.tgz", + "integrity": "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==" + }, + "node_modules/tinyglobby": { + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", "dev": true, + "license": "MIT", "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" + "fdir": "^6.5.0", + "picomatch": "^4.0.3" }, "engines": { - "node": "*" + "node": ">=12.0.0" }, "funding": { - 
"url": "https://github.com/sponsors/isaacs" + "url": "https://github.com/sponsors/SuperchupuDev" } }, - "node_modules/text-table": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", - "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==" - }, - "node_modules/through": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", - "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==" - }, - "node_modules/time-zone": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/time-zone/-/time-zone-1.0.0.tgz", - "integrity": "sha512-TIsDdtKo6+XrPtiTm1ssmMngN1sAhyKnTO2kunQWqNPWIVvCm15Wmw4SWInwTVgJ5u/Tr04+8Ei9TNcw4x4ONA==", + "node_modules/tinyglobby/node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", "dev": true, + "license": "MIT", "engines": { - "node": ">=4" + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } } }, - "node_modules/tiny-case": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/tiny-case/-/tiny-case-1.0.3.tgz", - "integrity": "sha512-Eet/eeMhkO6TX8mnUteS9zgPbUMQa4I6Kkp5ORiBD5476/m+PIRiumP5tmh5ioJpH7k51Kehawy2UDfsnxxY8Q==", - "dev": true - }, - "node_modules/tinybench": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.5.0.tgz", - "integrity": "sha512-kRwSG8Zx4tjF9ZiyH4bhaebu+EDz1BOx9hOigYHlUW4xxI/wKIUQUqo018UlU4ar6ATPBsaMrdbKZ+tmPdohFA==", - "dev": true + "node_modules/tinyglobby/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": 
"sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } }, "node_modules/tinypool": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-0.5.0.tgz", - "integrity": "sha512-paHQtnrlS1QZYKF/GnLoOM/DN9fqaGOFbCbxzAhwniySnzl9Ebk8w73/dd34DAhe/obUbPAOldTyYXQZxnPBPQ==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.0.2.tgz", + "integrity": "sha512-al6n+QEANGFOMf/dmUMsuS5/r9B06uwlyNjZZql/zv8J7ybHCgoihBNORZCY2mzUuAnomQa2JdhyHKzZxPCrFA==", + "dev": true, + "engines": { + "node": "^18.0.0 || >=20.0.0" + } + }, + "node_modules/tinyrainbow": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-1.2.0.tgz", + "integrity": "sha512-weEDEq7Z5eTHPDh4xjX789+fHfF+P8boiFB+0vbWzpbnbsEr/GRaohi/uMKxg8RZMXnl1ItAi/IUHWMsjDV7kQ==", "dev": true, "engines": { "node": ">=14.0.0" } }, "node_modules/tinyspy": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-2.1.1.tgz", - "integrity": "sha512-XPJL2uSzcOyBMky6OFrusqWlzfFrXtE0hPuMgW8A2HmaqrPo4ZQHRN/V0QXN3FSjKxpsbRrFc5LI7KOwBsT1/w==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-3.0.2.tgz", + "integrity": "sha512-n1cw8k1k0x4pgA2+9XrOkFydTerNcJ1zWCO5Nn9scWHTD+5tp8dghT2x1uduQePZTZgd3Tupf+x9BxJjeJi77Q==", "dev": true, + "license": "MIT", "engines": { "node": ">=14.0.0" } }, - "node_modules/to-fast-properties": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", - "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", + "node_modules/tldts": { + "version": "6.1.85", + "resolved": "https://registry.npmjs.org/tldts/-/tldts-6.1.85.tgz", + "integrity": 
"sha512-gBdZ1RjCSevRPFix/hpaUWeak2/RNUZB4/8frF1r5uYMHjFptkiT0JXIebWvgI/0ZHXvxaUDDJshiA0j6GdL3w==", "dev": true, + "dependencies": { + "tldts-core": "^6.1.85" + }, + "bin": { + "tldts": "bin/cli.js" + } + }, + "node_modules/tldts-core": { + "version": "6.1.85", + "resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-6.1.85.tgz", + "integrity": "sha512-DTjUVvxckL1fIoPSb3KE7ISNtkWSawZdpfxGxwiIrZoO6EbHVDXXUIlIuWympPaeS+BLGyggozX/HTMsRAdsoA==", + "dev": true + }, + "node_modules/to-buffer": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/to-buffer/-/to-buffer-1.2.1.tgz", + "integrity": "sha512-tB82LpAIWjhLYbqjx3X4zEeHN6M8CiuOEy2JY8SEQVdYRe3CCHOFaqrBW1doLDrfpWhplcW7BL+bO3/6S3pcDQ==", + "license": "MIT", + "dependencies": { + "isarray": "^2.0.5", + "safe-buffer": "^5.2.1", + "typed-array-buffer": "^1.0.3" + }, "engines": { - "node": ">=4" + "node": ">= 0.4" } }, "node_modules/to-regex-range": { @@ -8548,36 +10568,48 @@ } }, "node_modules/tough-cookie": { - "version": "4.1.3", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz", - "integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-5.1.2.tgz", + "integrity": "sha512-FVDYdxtnj0G6Qm/DhNPSb8Ju59ULcup3tuJxkFb5K8Bv2pUXILbf0xZWU8PX8Ov19OXljbUyveOFwRMwkXzO+A==", "dev": true, "dependencies": { - "psl": "^1.1.33", - "punycode": "^2.1.1", - "universalify": "^0.2.0", - "url-parse": "^1.5.3" + "tldts": "^6.1.32" }, "engines": { - "node": ">=6" + "node": ">=16" } }, "node_modules/tr46": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-4.1.1.tgz", - "integrity": "sha512-2lv/66T7e5yNyhAAC4NaKe5nVavzuGJQVVtRYLyQ2OI8tsJ61PMLlelehb0wi2Hx6+hT/OJUWZcw8MjlSRnxvw==", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-5.1.0.tgz", + "integrity": 
"sha512-IUWnUK7ADYR5Sl1fZlO1INDUhVhatWl7BtJWsIhwJ0UAK7ilzzIa8uIqOO/aYVWHZPJkKbEL+362wrzoeRF7bw==", "dev": true, "dependencies": { - "punycode": "^2.3.0" + "punycode": "^2.3.1" }, "engines": { - "node": ">=14" + "node": ">=18" + } + }, + "node_modules/ts-api-utils": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.4.0.tgz", + "integrity": "sha512-3TaVTaAv2gTiMB35i3FiGJaRfwb3Pyn/j3m/bfAvGe8FB7CF6u+LMYqYlDh7reQf7UNvoTvdfAqHGmPGOSsPmA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18.12" + }, + "peerDependencies": { + "typescript": ">=4.8.4" } }, "node_modules/tsconfig-paths": { - "version": "3.14.2", - "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.2.tgz", - "integrity": "sha512-o/9iXgCYc5L/JxCHPe3Hvh8Q/2xm5Z+p18PESBU6Ff33695QnCHBEjcytY2q19ua7Mbl/DavtBOLq+oG0RCL+g==", + "version": "3.15.0", + "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz", + "integrity": "sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==", + "dev": true, + "license": "MIT", "dependencies": { "@types/json5": "^0.0.29", "json5": "^1.0.2", @@ -8585,23 +10617,19 @@ "strip-bom": "^3.0.0" } }, - "node_modules/tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" + "node_modules/tty-browserify": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/tty-browserify/-/tty-browserify-0.0.1.tgz", + "integrity": "sha512-C3TaO7K81YvjCgQH9Q1S3R3P3BtN3RIM8n+OvX4il1K1zgE8ZhI0op7kClgkxtutIE8hQrcrHBXvIheqKUUCxw==", + "dev": true }, - "node_modules/tsutils": { - "version": "3.21.0", - "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz", - "integrity": "sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==", + 
"node_modules/turndown": { + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/turndown/-/turndown-7.2.2.tgz", + "integrity": "sha512-1F7db8BiExOKxjSMU2b7if62D/XOyQyZbPKq/nUwopfgnHlqXHqQ0lvfUTeUIr1lZJzOPFn43dODyMSIfvWRKQ==", + "license": "MIT", "dependencies": { - "tslib": "^1.8.1" - }, - "engines": { - "node": ">= 6" - }, - "peerDependencies": { - "typescript": ">=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta" + "@mixmark-io/domino": "^2.2.0" } }, "node_modules/tweetnacl": { @@ -8613,6 +10641,7 @@ "version": "0.4.0", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, "dependencies": { "prelude-ls": "^1.2.1" }, @@ -8620,19 +10649,11 @@ "node": ">= 0.8.0" } }, - "node_modules/type-detect": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", - "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", - "dev": true, - "engines": { - "node": ">=4" - } - }, "node_modules/type-fest": { "version": "0.20.2", "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true, "engines": { "node": ">=10" }, @@ -8652,23 +10673,89 @@ "node": ">= 0.6" } }, - "node_modules/typed-array-length": { + "node_modules/typed-array-buffer": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.3.tgz", + "integrity": "sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "is-typed-array": "^1.1.14" + }, + 
"engines": { + "node": ">= 0.4" + } + }, + "node_modules/typed-array-byte-length": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.3.tgz", + "integrity": "sha512-BaXgOuIxz8n8pIq3e7Atg/7s+DpiYrxn4vdot3w9KbnBhcRQq6o3xemQdIfynqSeXeDrF32x+WvfzmOjPiY9lg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "for-each": "^0.3.3", + "gopd": "^1.2.0", + "has-proto": "^1.2.0", + "is-typed-array": "^1.1.14" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typed-array-byte-offset": { "version": "1.0.4", - "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.4.tgz", - "integrity": "sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng==", + "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.4.tgz", + "integrity": "sha512-bTlAFB/FBYMcuX81gbL4OcpH5PmlFHqlCCpAl8AlEzMz5k53oNDvN8p1PNOWLEmI2x4orp3raOFB51tv9X+MFQ==", + "dev": true, + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.8", + "for-each": "^0.3.3", + "gopd": "^1.2.0", + "has-proto": "^1.2.0", + "is-typed-array": "^1.1.15", + "reflect.getprototypeof": "^1.0.9" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typed-array-length": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.7.tgz", + "integrity": "sha512-3KS2b+kL7fsuk/eJZ7EQdnEmQoaho/r6KUef7hxvltNA5DR8NAUM+8wJMbJyZ4G9/7i3v5zPBIMN5aybAh2/Jg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", "for-each": "^0.3.3", - "is-typed-array": "^1.1.9" + "gopd": "^1.0.1", + "is-typed-array": "^1.1.13", + "possible-typed-array-names": "^1.0.0", + 
"reflect.getprototypeof": "^1.0.6" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/typescript": { - "version": "5.3.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.3.3.tgz", - "integrity": "sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw==", + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "devOptional": true, + "license": "Apache-2.0", "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -8677,62 +10764,153 @@ "node": ">=14.17" } }, - "node_modules/ua-parser-js": { - "version": "0.7.35", - "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-0.7.35.tgz", - "integrity": "sha512-veRf7dawaj9xaWEu9HoTVn5Pggtc/qj+kqTOFvNiN1l0YdxwC1kvel57UCjThjGa3BHBihE8/UJAHI+uQHmd/g==", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/ua-parser-js" - }, - { - "type": "paypal", - "url": "https://paypal.me/faisalman" - } - ], + "node_modules/typescript-eslint": { + "version": "8.54.0", + "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.54.0.tgz", + "integrity": "sha512-CKsJ+g53QpsNPqbzUsfKVgd3Lny4yKZ1pP4qN3jdMOg/sisIDLGyDMezycquXLE5JsEU0wp3dGNdzig0/fmSVQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/eslint-plugin": "8.54.0", + "@typescript-eslint/parser": "8.54.0", + "@typescript-eslint/typescript-estree": "8.54.0", + "@typescript-eslint/utils": "8.54.0" + }, "engines": { - "node": "*" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <6.0.0" } }, "node_modules/uc.micro": { - "version": 
"1.0.6", - "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-1.0.6.tgz", - "integrity": "sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==" + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz", + "integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==", + "license": "MIT" }, "node_modules/ufo": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.3.2.tgz", - "integrity": "sha512-o+ORpgGwaYQXgqGDwd+hkS4PuZ3QnmqMMxRuajK/a38L6fTpcE5GPIfrf+L/KemFzfUpeUQc1rRS1iDBozvnFA==", - "dev": true + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.5.4.tgz", + "integrity": "sha512-UsUk3byDzKd04EyoZ7U4DOlxQaD14JUKQl6/P7wiX4FNvUfm3XL246n9W5AmqwW5RSFJ27NAuM0iLscAOYUiGQ==" }, "node_modules/unbox-primitive": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", - "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.1.0.tgz", + "integrity": "sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==", + "dev": true, + "license": "MIT", "dependencies": { - "call-bind": "^1.0.2", + "call-bound": "^1.0.3", "has-bigints": "^1.0.2", - "has-symbols": "^1.0.3", - "which-boxed-primitive": "^1.0.2" + "has-symbols": "^1.1.0", + "which-boxed-primitive": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/unctx": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/unctx/-/unctx-2.4.1.tgz", + "integrity": "sha512-AbaYw0Nm4mK4qjhns67C+kgxR2YWiwlDBPzxrN8h8C6VtAdCgditAY5Dezu3IJy4XVqAnbrXt9oQJvsn3fyozg==", + "dependencies": { + "acorn": "^8.14.0", + "estree-walker": 
"^3.0.3", + "magic-string": "^0.30.17", + "unplugin": "^2.1.0" + } + }, + "node_modules/unctx/node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dependencies": { + "@types/estree": "^1.0.0" + } + }, "node_modules/undici-types": { "version": "5.26.5", "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", "devOptional": true }, - "node_modules/universalify": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz", - "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==", - "dev": true, + "node_modules/unicorn-magic": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.3.0.tgz", + "integrity": "sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/unimport": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/unimport/-/unimport-4.1.1.tgz", + "integrity": "sha512-j9+fijH6aDd05yv1fXlyt7HSxtOWtGtrZeYTVBsSUg57Iuf+Ps2itIZjeyu7bEQ4k0WOgYhHrdW8m/pJgOpl5g==", + "dependencies": { + "acorn": "^8.14.0", + "escape-string-regexp": "^5.0.0", + "estree-walker": "^3.0.3", + "fast-glob": "^3.3.3", + "local-pkg": "^1.0.0", + "magic-string": "^0.30.17", + "mlly": "^1.7.4", + "pathe": "^2.0.2", + "picomatch": "^4.0.2", + "pkg-types": "^1.3.1", + "scule": "^1.3.0", + "strip-literal": "^3.0.0", + "unplugin": "^2.1.2", + "unplugin-utils": "^0.2.3" + }, + "engines": { + "node": ">=18.12.0" + } + }, + 
"node_modules/unimport/node_modules/escape-string-regexp": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz", + "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/unimport/node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dependencies": { + "@types/estree": "^1.0.0" + } + }, + "node_modules/unimport/node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==" + }, + "node_modules/unimport/node_modules/picomatch": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", + "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", "engines": { - "node": ">= 4.0.0" + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" } }, "node_modules/unpipe": { @@ -8744,68 +10922,138 @@ } }, "node_modules/unplugin": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/unplugin/-/unplugin-1.6.0.tgz", - "integrity": "sha512-BfJEpWBu3aE/AyHx8VaNE/WgouoQxgH9baAiH82JjX8cqVyi3uJQstqwD5J+SZxIK326SZIhsSZlALXVBCknTQ==", + "version": "2.3.10", + "resolved": "https://registry.npmjs.org/unplugin/-/unplugin-2.3.10.tgz", + "integrity": "sha512-6NCPkv1ClwH+/BGE9QeoTIl09nuiAt0gS28nn1PvYXsGKRwM2TCbFA2QiilmehPDTXIe684k4rZI1yl3A1PCUw==", + "license": "MIT", + "dependencies": { + "@jridgewell/remapping": "^2.3.5", + "acorn": "^8.15.0", + 
"picomatch": "^4.0.3", + "webpack-virtual-modules": "^0.6.2" + }, + "engines": { + "node": ">=18.12.0" + } + }, + "node_modules/unplugin-utils": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/unplugin-utils/-/unplugin-utils-0.2.4.tgz", + "integrity": "sha512-8U/MtpkPkkk3Atewj1+RcKIjb5WBimZ/WSLhhR3w6SsIj8XJuKTacSP8g+2JhfSGw0Cb125Y+2zA/IzJZDVbhA==", "dependencies": { - "acorn": "^8.11.2", - "chokidar": "^3.5.3", - "webpack-sources": "^3.2.3", - "webpack-virtual-modules": "^0.6.1" + "pathe": "^2.0.2", + "picomatch": "^4.0.2" + }, + "engines": { + "node": ">=18.12.0" + }, + "funding": { + "url": "https://github.com/sponsors/sxzz" + } + }, + "node_modules/unplugin-utils/node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==" + }, + "node_modules/unplugin-utils/node_modules/picomatch": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", + "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" } }, "node_modules/unplugin-vue-markdown": { - "version": "0.25.2", - "resolved": "https://registry.npmjs.org/unplugin-vue-markdown/-/unplugin-vue-markdown-0.25.2.tgz", - "integrity": "sha512-bDDWqtK1PUkWK/+kczOk33hqO5WulOUx5ZxfbCZuVArcUSwY7aB2vf4e2K+qdrlxalxkpjIA64z/liOrC/cjiQ==", + "version": "29.2.0", + "resolved": "https://registry.npmjs.org/unplugin-vue-markdown/-/unplugin-vue-markdown-29.2.0.tgz", + "integrity": "sha512-/x2hFgQ6cWN1Kls+yK5mAI9YDmeTofftynVGgOy1llBlDX1ifaXsQBls/bpORaiwn7cxA7HkOo0wn/xKcrXBHA==", + "license": "MIT", "dependencies": { - "@mdit-vue/plugin-component": "^1.0.0", - "@mdit-vue/plugin-frontmatter": "^1.0.0", - "@mdit-vue/types": "^1.0.0", - "@rollup/pluginutils": 
"^5.0.5", - "@types/markdown-it": "^13.0.6", - "markdown-it": "^13.0.2", - "unplugin": "^1.5.0" + "@mdit-vue/plugin-component": "^3.0.2", + "@mdit-vue/plugin-frontmatter": "^3.0.2", + "@mdit-vue/types": "^3.0.2", + "@types/markdown-it": "^14.1.2", + "markdown-it": "^14.1.0", + "markdown-it-async": "^2.2.0", + "unplugin": "^2.3.10", + "unplugin-utils": "^0.3.0" + }, + "engines": { + "node": ">=20" }, "funding": { "url": "https://github.com/sponsors/antfu" }, "peerDependencies": { - "vite": "^2.0.0 || ^3.0.0-0 || ^4.0.0-0 || ^5.0.0-0" + "vite": "^2.0.0 || ^3.0.0-0 || ^4.0.0-0 || ^5.0.0-0 || ^6.0.0 || ^7.0.0" } }, - "node_modules/unplugin-vue-markdown/node_modules/@mdit-vue/plugin-component": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@mdit-vue/plugin-component/-/plugin-component-1.0.0.tgz", - "integrity": "sha512-ZXsJwxkG5yyTHARIYbR74cT4AZ0SfMokFFjiHYCbypHIeYWgJhso4+CZ8+3V9EWFG3EHlGoKNGqKp9chHnqntQ==", - "dependencies": { - "@types/markdown-it": "^13.0.1", - "markdown-it": "^13.0.1" + "node_modules/unplugin-vue-markdown/node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "license": "MIT" + }, + "node_modules/unplugin-vue-markdown/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" } }, - "node_modules/unplugin-vue-markdown/node_modules/@mdit-vue/plugin-frontmatter": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@mdit-vue/plugin-frontmatter/-/plugin-frontmatter-1.0.0.tgz", - "integrity": 
"sha512-MMA7Ny+YPZA7eDOY1t4E+rKuEWO39mzDdP/M68fKdXJU6VfcGkPr7gnpnJfW2QBJ5qIvMrK/3lDAA2JBy5TfpA==", + "node_modules/unplugin-vue-markdown/node_modules/unplugin-utils": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/unplugin-utils/-/unplugin-utils-0.3.0.tgz", + "integrity": "sha512-JLoggz+PvLVMJo+jZt97hdIIIZ2yTzGgft9e9q8iMrC4ewufl62ekeW7mixBghonn2gVb/ICjyvlmOCUBnJLQg==", + "license": "MIT", "dependencies": { - "@mdit-vue/types": "1.0.0", - "@types/markdown-it": "^13.0.1", - "gray-matter": "^4.0.3", - "markdown-it": "^13.0.1" + "pathe": "^2.0.3", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=20.19.0" + }, + "funding": { + "url": "https://github.com/sponsors/sxzz" } }, - "node_modules/unplugin-vue-markdown/node_modules/@mdit-vue/types": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@mdit-vue/types/-/types-1.0.0.tgz", - "integrity": "sha512-xeF5+sHLzRNF7plbksywKCph4qli20l72of2fMlZQQ7RECvXYrRkE9+bjRFQCyULC7B8ydUYbpbkux5xJlVWyw==" + "node_modules/unplugin/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } }, - "node_modules/unplugin-vue-markdown/node_modules/@types/markdown-it": { - "version": "13.0.7", - "resolved": "https://registry.npmjs.org/@types/markdown-it/-/markdown-it-13.0.7.tgz", - "integrity": "sha512-U/CBi2YUUcTHBt5tjO2r5QV/x0Po6nsYwQU4Y04fBS6vfoImaiZ6f8bi3CjTCxBPQSO1LMyUqkByzi8AidyxfA==", + "node_modules/untyped": { + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/untyped/-/untyped-1.5.2.tgz", + "integrity": "sha512-eL/8PlhLcMmlMDtNPKhyyz9kEBDS3Uk4yMu/ewlkT2WFbtzScjHWPJLdQLmaGPUKjXzwe9MumOtOgc4Fro96Kg==", "dependencies": { - "@types/linkify-it": "*", - "@types/mdurl": "*" + 
"@babel/core": "^7.26.0", + "@babel/standalone": "^7.26.4", + "@babel/types": "^7.26.3", + "citty": "^0.1.6", + "defu": "^6.1.4", + "jiti": "^2.4.1", + "knitwork": "^1.2.0", + "scule": "^1.3.0" + }, + "bin": { + "untyped": "dist/cli.mjs" } }, "node_modules/upath": { @@ -8813,16 +11061,16 @@ "resolved": "https://registry.npmjs.org/upath/-/upath-2.0.1.tgz", "integrity": "sha512-1uEe95xksV1O0CYKXo8vQvN1JEbtJp7lb7C5U9HMsIp6IVwntkH/oNUzyVNQSd4S1sYk2FpSSW44FqMc8qee5w==", "devOptional": true, + "license": "MIT", "engines": { "node": ">=4", "yarn": "*" } }, "node_modules/update-browserslist-db": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.11.tgz", - "integrity": "sha512-dCwEFf0/oT85M1fHBg4F0jtLwJrutGoHSQXCh7u4o2t1drG+c0a9Flnqww6XUKSfQMPpJBRjU8d4RXB09qtvaA==", - "dev": true, + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.1.tgz", + "integrity": "sha512-R8UzCaa9Az+38REPiJ1tXlImTJXlVfgHZsglwBD/k6nj76ctsH1E3q4doGrukiLQd3sGQYu56r5+lo5r94l29A==", "funding": [ { "type": "opencollective", @@ -8837,10 +11085,9 @@ "url": "https://github.com/sponsors/ai" } ], - "peer": true, "dependencies": { - "escalade": "^3.1.1", - "picocolors": "^1.0.0" + "escalade": "^3.2.0", + "picocolors": "^1.1.0" }, "bin": { "update-browserslist-db": "cli.js" @@ -8853,20 +11100,27 @@ "version": "4.4.1", "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, "dependencies": { "punycode": "^2.1.0" } }, - "node_modules/url-parse": { - "version": "1.5.10", - "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz", - "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==", + "node_modules/url": { + "version": "0.11.3", + "resolved": 
"https://registry.npmjs.org/url/-/url-0.11.3.tgz", + "integrity": "sha512-6hxOLGfZASQK/cijlZnZJTq8OXAkt/3YGfQX45vvMYXpZoo8NdWZcY73K108Jf759lS1Bv/8wXnHDTSz17dSRw==", "dev": true, "dependencies": { - "querystringify": "^2.1.1", - "requires-port": "^1.0.0" + "punycode": "^1.4.1", + "qs": "^6.11.2" } }, + "node_modules/url/node_modules/punycode": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", + "integrity": "sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==", + "dev": true + }, "node_modules/util": { "version": "0.12.5", "resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz", @@ -8892,20 +11146,6 @@ "node": ">= 0.4.0" } }, - "node_modules/v8-to-istanbul": { - "version": "9.1.0", - "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.1.0.tgz", - "integrity": "sha512-6z3GW9x8G1gd+JIIgQQQxXuiJtCXeAjp6RaPEPLv62mH3iPHPxV6W3robxtCzNErRo6ZwTmzWhsbNvjyEBKzKA==", - "dev": true, - "dependencies": { - "@jridgewell/trace-mapping": "^0.3.12", - "@types/istanbul-lib-coverage": "^2.0.1", - "convert-source-map": "^1.6.0" - }, - "engines": { - "node": ">=10.12.0" - } - }, "node_modules/vary": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", @@ -8915,15 +11155,24 @@ } }, "node_modules/vee-validate": { - "version": "4.12.4", - "resolved": "https://registry.npmjs.org/vee-validate/-/vee-validate-4.12.4.tgz", - "integrity": "sha512-rqSjMdl0l/RiGKywKhkXttUKwDlQOoxTxe31uMQiMlwK4Hbtlvr3OcQvpREp/qPTARxNKudKWCUVW/mfzuxUVQ==", + "version": "4.15.1", + "resolved": "https://registry.npmjs.org/vee-validate/-/vee-validate-4.15.1.tgz", + "integrity": "sha512-DkFsiTwEKau8VIxyZBGdO6tOudD+QoUBPuHj3e6QFqmbfCRj1ArmYWue9lEp6jLSWBIw4XPlDLjFIZNLdRAMSg==", + "license": "MIT", "dependencies": { - "@vue/devtools-api": "^6.5.1", + "@vue/devtools-api": "^7.5.2", "type-fest": "^4.8.3" }, "peerDependencies": { - "vue": "^3.3.11" + "vue": "^3.4.26" + } + 
}, + "node_modules/vee-validate/node_modules/@vue/devtools-api": { + "version": "7.5.2", + "resolved": "https://registry.npmjs.org/@vue/devtools-api/-/devtools-api-7.5.2.tgz", + "integrity": "sha512-VxPbAQxJrYSIkoGVvQ2oOoKW8u4CMpvRLySTxhoJA38z8bQEGy9GO33eoRY/DulJbSFRfjZFNvH+dh8B4qpesQ==", + "dependencies": { + "@vue/devtools-kit": "^7.5.2" } }, "node_modules/vee-validate/node_modules/type-fest": { @@ -8948,13 +11197,14 @@ } }, "node_modules/vite": { - "version": "5.0.12", - "resolved": "https://registry.npmjs.org/vite/-/vite-5.0.12.tgz", - "integrity": "sha512-4hsnEkG3q0N4Tzf1+t6NdN9dg/L3BM+q8SWgbSPnJvrgH2kgdyzfVJwbR1ic69/4uMJJ/3dqDZZE5/WwqW8U1w==", + "version": "5.4.21", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.21.tgz", + "integrity": "sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw==", + "license": "MIT", "dependencies": { - "esbuild": "^0.19.3", - "postcss": "^8.4.32", - "rollup": "^4.2.0" + "esbuild": "^0.21.3", + "postcss": "^8.4.43", + "rollup": "^4.20.0" }, "bin": { "vite": "bin/vite.js" @@ -8973,6 +11223,7 @@ "less": "*", "lightningcss": "^1.21.0", "sass": "*", + "sass-embedded": "*", "stylus": "*", "sugarss": "*", "terser": "^5.4.0" @@ -8990,6 +11241,9 @@ "sass": { "optional": true }, + "sass-embedded": { + "optional": true + }, "stylus": { "optional": true }, @@ -9002,9 +11256,10 @@ } }, "node_modules/vite-node": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-1.2.0.tgz", - "integrity": "sha512-ETnQTHeAbbOxl7/pyBck9oAPZZZo+kYnFt1uQDD+hPReOc+wCjXw4r4jHriBRuVDB5isHmPXxrfc1yJnfBERqg==", + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-1.6.1.tgz", + "integrity": "sha512-YAXkfvGtuTzwWbDSACdJSg4A4DZiAqckWe90Zapc/sEX3XvHcw1NdurM/6od8J207tSDqNbSsgdCacBgvJKFuA==", + "license": "MIT", "dependencies": { "cac": "^6.7.14", "debug": "^4.3.4", @@ -9022,6 +11277,22 @@ "url": "https://opencollective.com/vitest" } }, + 
"node_modules/vite-plugin-node-polyfills": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/vite-plugin-node-polyfills/-/vite-plugin-node-polyfills-0.23.0.tgz", + "integrity": "sha512-4n+Ys+2bKHQohPBKigFlndwWQ5fFKwaGY6muNDMTb0fSQLyBzS+jjUNRZG9sKF0S/Go4ApG6LFnUGopjkILg3w==", + "dev": true, + "dependencies": { + "@rollup/plugin-inject": "^5.0.5", + "node-stdlib-browser": "^1.2.0" + }, + "funding": { + "url": "https://github.com/sponsors/davidmyersdev" + }, + "peerDependencies": { + "vite": "^2.0.0 || ^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0" + } + }, "node_modules/vite-plugin-require-transform": { "version": "1.0.21", "resolved": "https://registry.npmjs.org/vite-plugin-require-transform/-/vite-plugin-require-transform-1.0.21.tgz", @@ -9035,141 +11306,47 @@ } }, "node_modules/vite-plugin-vuetify": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/vite-plugin-vuetify/-/vite-plugin-vuetify-2.0.1.tgz", - "integrity": "sha512-GlRVAruohE8b0FqmeYYh1cYg3n8THGOv066uMA44qLv9uhUxSLw55CS7fi2yU0wH363TJ2vq36zUsPTjRFrjGQ==", + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/vite-plugin-vuetify/-/vite-plugin-vuetify-2.1.3.tgz", + "integrity": "sha512-Q4SC/4TqbNvaZIFb9YsfBqkGlYHbJJJ6uU3CnRBZqLUF3s5eCMVZAaV4GkTbehIH/bhSj42lMXztOwc71u6rVw==", "devOptional": true, + "license": "MIT", "dependencies": { - "@vuetify/loader-shared": "^2.0.1", + "@vuetify/loader-shared": "^2.1.2", "debug": "^4.3.3", "upath": "^2.0.1" }, "engines": { - "node": "^18.0.0 || >=20.0.0" - }, - "peerDependencies": { - "vite": ">=5", - "vue": "^3.0.0", - "vuetify": "^3.0.0" - } - }, - "node_modules/vite/node_modules/rollup": { - "version": "4.9.5", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.9.5.tgz", - "integrity": "sha512-E4vQW0H/mbNMw2yLSqJyjtkHY9dslf/p0zuT1xehNRqUTBOFMqEjguDvqhXr7N7r/4ttb2jr4T41d3dncmIgbQ==", - "dependencies": { - "@types/estree": "1.0.5" - }, - "bin": { - "rollup": "dist/bin/rollup" - }, - "engines": { - "node": ">=18.0.0", - "npm": 
">=8.0.0" - }, - "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.9.5", - "@rollup/rollup-android-arm64": "4.9.5", - "@rollup/rollup-darwin-arm64": "4.9.5", - "@rollup/rollup-darwin-x64": "4.9.5", - "@rollup/rollup-linux-arm-gnueabihf": "4.9.5", - "@rollup/rollup-linux-arm64-gnu": "4.9.5", - "@rollup/rollup-linux-arm64-musl": "4.9.5", - "@rollup/rollup-linux-riscv64-gnu": "4.9.5", - "@rollup/rollup-linux-x64-gnu": "4.9.5", - "@rollup/rollup-linux-x64-musl": "4.9.5", - "@rollup/rollup-win32-arm64-msvc": "4.9.5", - "@rollup/rollup-win32-ia32-msvc": "4.9.5", - "@rollup/rollup-win32-x64-msvc": "4.9.5", - "fsevents": "~2.3.2" - } - }, - "node_modules/vitest": { - "version": "0.31.4", - "resolved": "https://registry.npmjs.org/vitest/-/vitest-0.31.4.tgz", - "integrity": "sha512-GoV0VQPmWrUFOZSg3RpQAPN+LPmHg2/gxlMNJlyxJihkz6qReHDV6b0pPDcqFLNEPya4tWJ1pgwUNP9MLmUfvQ==", - "dev": true, - "dependencies": { - "@types/chai": "^4.3.5", - "@types/chai-subset": "^1.3.3", - "@types/node": "*", - "@vitest/expect": "0.31.4", - "@vitest/runner": "0.31.4", - "@vitest/snapshot": "0.31.4", - "@vitest/spy": "0.31.4", - "@vitest/utils": "0.31.4", - "acorn": "^8.8.2", - "acorn-walk": "^8.2.0", - "cac": "^6.7.14", - "chai": "^4.3.7", - "concordance": "^5.0.4", - "debug": "^4.3.4", - "local-pkg": "^0.4.3", - "magic-string": "^0.30.0", - "pathe": "^1.1.0", - "picocolors": "^1.0.0", - "std-env": "^3.3.2", - "strip-literal": "^1.0.1", - "tinybench": "^2.5.0", - "tinypool": "^0.5.0", - "vite": "^3.0.0 || ^4.0.0", - "vite-node": "0.31.4", - "why-is-node-running": "^2.2.2" - }, - "bin": { - "vitest": "vitest.mjs" - }, - "engines": { - "node": ">=v14.18.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" + "node": "^18.0.0 || >=20.0.0" }, "peerDependencies": { - "@edge-runtime/vm": "*", - "@vitest/browser": "*", - "@vitest/ui": "*", - "happy-dom": "*", - "jsdom": "*", - "playwright": "*", - "safaridriver": "*", - "webdriverio": "*" - }, - "peerDependenciesMeta": { - 
"@edge-runtime/vm": { - "optional": true - }, - "@vitest/browser": { - "optional": true - }, - "@vitest/ui": { - "optional": true - }, - "happy-dom": { - "optional": true - }, - "jsdom": { - "optional": true - }, - "playwright": { - "optional": true - }, - "safaridriver": { - "optional": true - }, - "webdriverio": { - "optional": true - } + "vite": ">=5", + "vue": "^3.0.0", + "vuetify": ">=3" + } + }, + "node_modules/vite/node_modules/@esbuild/aix-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz", + "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==", + "cpu": [ + "ppc64" + ], + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=12" } }, - "node_modules/vitest/node_modules/@esbuild/android-arm": { - "version": "0.18.20", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.18.20.tgz", - "integrity": "sha512-fyi7TDI/ijKKNZTUJAQqiG5T7YjJXgnzkURqmGj13C6dCqckZBLdl4h7bkhHt/t0WP+zO9/zwroDvANaOqO5Sw==", + "node_modules/vite/node_modules/@esbuild/android-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz", + "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==", "cpu": [ "arm" ], - "dev": true, "optional": true, "os": [ "android" @@ -9178,14 +11355,13 @@ "node": ">=12" } }, - "node_modules/vitest/node_modules/@esbuild/android-arm64": { - "version": "0.18.20", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.18.20.tgz", - "integrity": "sha512-Nz4rJcchGDtENV0eMKUNa6L12zz2zBDXuhj/Vjh18zGqB44Bi7MBMSXjgunJgjRhCmKOjnPuZp4Mb6OKqtMHLQ==", + "node_modules/vite/node_modules/@esbuild/android-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz", + "integrity": 
"sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==", "cpu": [ "arm64" ], - "dev": true, "optional": true, "os": [ "android" @@ -9194,14 +11370,13 @@ "node": ">=12" } }, - "node_modules/vitest/node_modules/@esbuild/android-x64": { - "version": "0.18.20", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.18.20.tgz", - "integrity": "sha512-8GDdlePJA8D6zlZYJV/jnrRAi6rOiNaCC/JclcXpB+KIuvfBN4owLtgzY2bsxnx666XjJx2kDPUmnTtR8qKQUg==", + "node_modules/vite/node_modules/@esbuild/android-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz", + "integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==", "cpu": [ "x64" ], - "dev": true, "optional": true, "os": [ "android" @@ -9210,14 +11385,13 @@ "node": ">=12" } }, - "node_modules/vitest/node_modules/@esbuild/darwin-arm64": { - "version": "0.18.20", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.18.20.tgz", - "integrity": "sha512-bxRHW5kHU38zS2lPTPOyuyTm+S+eobPUnTNkdJEfAddYgEcll4xkT8DB9d2008DtTbl7uJag2HuE5NZAZgnNEA==", + "node_modules/vite/node_modules/@esbuild/darwin-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz", + "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==", "cpu": [ "arm64" ], - "dev": true, "optional": true, "os": [ "darwin" @@ -9226,14 +11400,13 @@ "node": ">=12" } }, - "node_modules/vitest/node_modules/@esbuild/darwin-x64": { - "version": "0.18.20", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.18.20.tgz", - "integrity": "sha512-pc5gxlMDxzm513qPGbCbDukOdsGtKhfxD1zJKXjCCcU7ju50O7MeAZ8c4krSJcOIJGFR+qx21yMMVYwiQvyTyQ==", + "node_modules/vite/node_modules/@esbuild/darwin-x64": { + "version": "0.21.5", + "resolved": 
"https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz", + "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==", "cpu": [ "x64" ], - "dev": true, "optional": true, "os": [ "darwin" @@ -9242,14 +11415,13 @@ "node": ">=12" } }, - "node_modules/vitest/node_modules/@esbuild/freebsd-arm64": { - "version": "0.18.20", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.18.20.tgz", - "integrity": "sha512-yqDQHy4QHevpMAaxhhIwYPMv1NECwOvIpGCZkECn8w2WFHXjEwrBn3CeNIYsibZ/iZEUemj++M26W3cNR5h+Tw==", + "node_modules/vite/node_modules/@esbuild/freebsd-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz", + "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==", "cpu": [ "arm64" ], - "dev": true, "optional": true, "os": [ "freebsd" @@ -9258,14 +11430,13 @@ "node": ">=12" } }, - "node_modules/vitest/node_modules/@esbuild/freebsd-x64": { - "version": "0.18.20", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.18.20.tgz", - "integrity": "sha512-tgWRPPuQsd3RmBZwarGVHZQvtzfEBOreNuxEMKFcd5DaDn2PbBxfwLcj4+aenoh7ctXcbXmOQIn8HI6mCSw5MQ==", + "node_modules/vite/node_modules/@esbuild/freebsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz", + "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==", "cpu": [ "x64" ], - "dev": true, "optional": true, "os": [ "freebsd" @@ -9274,14 +11445,13 @@ "node": ">=12" } }, - "node_modules/vitest/node_modules/@esbuild/linux-arm": { - "version": "0.18.20", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.18.20.tgz", - "integrity": "sha512-/5bHkMWnq1EgKr1V+Ybz3s1hWXok7mDFUMQ4cG10AfW3wL02PSZi5kFpYKrptDsgb2WAJIvRcDm+qIvXf/apvg==", + 
"node_modules/vite/node_modules/@esbuild/linux-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz", + "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==", "cpu": [ "arm" ], - "dev": true, "optional": true, "os": [ "linux" @@ -9290,14 +11460,13 @@ "node": ">=12" } }, - "node_modules/vitest/node_modules/@esbuild/linux-arm64": { - "version": "0.18.20", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.18.20.tgz", - "integrity": "sha512-2YbscF+UL7SQAVIpnWvYwM+3LskyDmPhe31pE7/aoTMFKKzIc9lLbyGUpmmb8a8AixOL61sQ/mFh3jEjHYFvdA==", + "node_modules/vite/node_modules/@esbuild/linux-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz", + "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==", "cpu": [ "arm64" ], - "dev": true, "optional": true, "os": [ "linux" @@ -9306,14 +11475,13 @@ "node": ">=12" } }, - "node_modules/vitest/node_modules/@esbuild/linux-ia32": { - "version": "0.18.20", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.18.20.tgz", - "integrity": "sha512-P4etWwq6IsReT0E1KHU40bOnzMHoH73aXp96Fs8TIT6z9Hu8G6+0SHSw9i2isWrD2nbx2qo5yUqACgdfVGx7TA==", + "node_modules/vite/node_modules/@esbuild/linux-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz", + "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==", "cpu": [ "ia32" ], - "dev": true, "optional": true, "os": [ "linux" @@ -9322,14 +11490,13 @@ "node": ">=12" } }, - "node_modules/vitest/node_modules/@esbuild/linux-loong64": { - "version": "0.18.20", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.18.20.tgz", - "integrity": 
"sha512-nXW8nqBTrOpDLPgPY9uV+/1DjxoQ7DoB2N8eocyq8I9XuqJ7BiAMDMf9n1xZM9TgW0J8zrquIb/A7s3BJv7rjg==", + "node_modules/vite/node_modules/@esbuild/linux-loong64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz", + "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==", "cpu": [ "loong64" ], - "dev": true, "optional": true, "os": [ "linux" @@ -9338,14 +11505,13 @@ "node": ">=12" } }, - "node_modules/vitest/node_modules/@esbuild/linux-mips64el": { - "version": "0.18.20", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.18.20.tgz", - "integrity": "sha512-d5NeaXZcHp8PzYy5VnXV3VSd2D328Zb+9dEq5HE6bw6+N86JVPExrA6O68OPwobntbNJ0pzCpUFZTo3w0GyetQ==", + "node_modules/vite/node_modules/@esbuild/linux-mips64el": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz", + "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==", "cpu": [ "mips64el" ], - "dev": true, "optional": true, "os": [ "linux" @@ -9354,14 +11520,13 @@ "node": ">=12" } }, - "node_modules/vitest/node_modules/@esbuild/linux-ppc64": { - "version": "0.18.20", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.18.20.tgz", - "integrity": "sha512-WHPyeScRNcmANnLQkq6AfyXRFr5D6N2sKgkFo2FqguP44Nw2eyDlbTdZwd9GYk98DZG9QItIiTlFLHJHjxP3FA==", + "node_modules/vite/node_modules/@esbuild/linux-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz", + "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==", "cpu": [ "ppc64" ], - "dev": true, "optional": true, "os": [ "linux" @@ -9370,14 +11535,13 @@ "node": ">=12" } }, - "node_modules/vitest/node_modules/@esbuild/linux-riscv64": { - "version": "0.18.20", - 
"resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.18.20.tgz", - "integrity": "sha512-WSxo6h5ecI5XH34KC7w5veNnKkju3zBRLEQNY7mv5mtBmrP/MjNBCAlsM2u5hDBlS3NGcTQpoBvRzqBcRtpq1A==", + "node_modules/vite/node_modules/@esbuild/linux-riscv64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz", + "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==", "cpu": [ "riscv64" ], - "dev": true, "optional": true, "os": [ "linux" @@ -9386,14 +11550,13 @@ "node": ">=12" } }, - "node_modules/vitest/node_modules/@esbuild/linux-s390x": { - "version": "0.18.20", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.18.20.tgz", - "integrity": "sha512-+8231GMs3mAEth6Ja1iK0a1sQ3ohfcpzpRLH8uuc5/KVDFneH6jtAJLFGafpzpMRO6DzJ6AvXKze9LfFMrIHVQ==", + "node_modules/vite/node_modules/@esbuild/linux-s390x": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz", + "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==", "cpu": [ "s390x" ], - "dev": true, "optional": true, "os": [ "linux" @@ -9402,14 +11565,13 @@ "node": ">=12" } }, - "node_modules/vitest/node_modules/@esbuild/linux-x64": { - "version": "0.18.20", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.18.20.tgz", - "integrity": "sha512-UYqiqemphJcNsFEskc73jQ7B9jgwjWrSayxawS6UVFZGWrAAtkzjxSqnoclCXxWtfwLdzU+vTpcNYhpn43uP1w==", + "node_modules/vite/node_modules/@esbuild/linux-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz", + "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==", "cpu": [ "x64" ], - "dev": true, "optional": true, "os": [ "linux" @@ -9418,14 +11580,13 @@ "node": ">=12" } }, - 
"node_modules/vitest/node_modules/@esbuild/netbsd-x64": { - "version": "0.18.20", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.18.20.tgz", - "integrity": "sha512-iO1c++VP6xUBUmltHZoMtCUdPlnPGdBom6IrO4gyKPFFVBKioIImVooR5I83nTew5UOYrk3gIJhbZh8X44y06A==", + "node_modules/vite/node_modules/@esbuild/netbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz", + "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==", "cpu": [ "x64" ], - "dev": true, "optional": true, "os": [ "netbsd" @@ -9434,14 +11595,13 @@ "node": ">=12" } }, - "node_modules/vitest/node_modules/@esbuild/openbsd-x64": { - "version": "0.18.20", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.18.20.tgz", - "integrity": "sha512-e5e4YSsuQfX4cxcygw/UCPIEP6wbIL+se3sxPdCiMbFLBWu0eiZOJ7WoD+ptCLrmjZBK1Wk7I6D/I3NglUGOxg==", + "node_modules/vite/node_modules/@esbuild/openbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz", + "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==", "cpu": [ "x64" ], - "dev": true, "optional": true, "os": [ "openbsd" @@ -9450,14 +11610,13 @@ "node": ">=12" } }, - "node_modules/vitest/node_modules/@esbuild/sunos-x64": { - "version": "0.18.20", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.18.20.tgz", - "integrity": "sha512-kDbFRFp0YpTQVVrqUd5FTYmWo45zGaXe0X8E1G/LKFC0v8x0vWrhOWSLITcCn63lmZIxfOMXtCfti/RxN/0wnQ==", + "node_modules/vite/node_modules/@esbuild/sunos-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz", + "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==", "cpu": [ "x64" ], - "dev": true, "optional": true, "os": [ 
"sunos" @@ -9466,14 +11625,13 @@ "node": ">=12" } }, - "node_modules/vitest/node_modules/@esbuild/win32-arm64": { - "version": "0.18.20", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.18.20.tgz", - "integrity": "sha512-ddYFR6ItYgoaq4v4JmQQaAI5s7npztfV4Ag6NrhiaW0RrnOXqBkgwZLofVTlq1daVTQNhtI5oieTvkRPfZrePg==", + "node_modules/vite/node_modules/@esbuild/win32-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz", + "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==", "cpu": [ "arm64" ], - "dev": true, "optional": true, "os": [ "win32" @@ -9482,14 +11640,13 @@ "node": ">=12" } }, - "node_modules/vitest/node_modules/@esbuild/win32-ia32": { - "version": "0.18.20", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.18.20.tgz", - "integrity": "sha512-Wv7QBi3ID/rROT08SABTS7eV4hX26sVduqDOTe1MvGMjNd3EjOz4b7zeexIR62GTIEKrfJXKL9LFxTYgkyeu7g==", + "node_modules/vite/node_modules/@esbuild/win32-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz", + "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==", "cpu": [ "ia32" ], - "dev": true, "optional": true, "os": [ "win32" @@ -9498,14 +11655,13 @@ "node": ">=12" } }, - "node_modules/vitest/node_modules/@esbuild/win32-x64": { - "version": "0.18.20", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.18.20.tgz", - "integrity": "sha512-kTdfRcSiDfQca/y9QIkng02avJ+NCaQvrMejlsB3RRv5sE9rRoeBPISaZpKxHELzRxZyLvNts1P27W3wV+8geQ==", + "node_modules/vite/node_modules/@esbuild/win32-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz", + "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==", "cpu": [ 
"x64" ], - "dev": true, "optional": true, "os": [ "win32" @@ -9514,11 +11670,10 @@ "node": ">=12" } }, - "node_modules/vitest/node_modules/esbuild": { - "version": "0.18.20", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.18.20.tgz", - "integrity": "sha512-ceqxoedUrcayh7Y7ZX6NdbbDzGROiyVBgC4PriJThBKSVPWnnFHZAkfI1lJT8QFkOwH4qOS2SJkS4wvpGl8BpA==", - "dev": true, + "node_modules/vite/node_modules/esbuild": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz", + "integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==", "hasInstallScript": true, "bin": { "esbuild": "bin/esbuild" @@ -9527,118 +11682,180 @@ "node": ">=12" }, "optionalDependencies": { - "@esbuild/android-arm": "0.18.20", - "@esbuild/android-arm64": "0.18.20", - "@esbuild/android-x64": "0.18.20", - "@esbuild/darwin-arm64": "0.18.20", - "@esbuild/darwin-x64": "0.18.20", - "@esbuild/freebsd-arm64": "0.18.20", - "@esbuild/freebsd-x64": "0.18.20", - "@esbuild/linux-arm": "0.18.20", - "@esbuild/linux-arm64": "0.18.20", - "@esbuild/linux-ia32": "0.18.20", - "@esbuild/linux-loong64": "0.18.20", - "@esbuild/linux-mips64el": "0.18.20", - "@esbuild/linux-ppc64": "0.18.20", - "@esbuild/linux-riscv64": "0.18.20", - "@esbuild/linux-s390x": "0.18.20", - "@esbuild/linux-x64": "0.18.20", - "@esbuild/netbsd-x64": "0.18.20", - "@esbuild/openbsd-x64": "0.18.20", - "@esbuild/sunos-x64": "0.18.20", - "@esbuild/win32-arm64": "0.18.20", - "@esbuild/win32-ia32": "0.18.20", - "@esbuild/win32-x64": "0.18.20" - } - }, - "node_modules/vitest/node_modules/vite": { - "version": "4.5.1", - "resolved": "https://registry.npmjs.org/vite/-/vite-4.5.1.tgz", - "integrity": "sha512-AXXFaAJ8yebyqzoNB9fu2pHoo/nWX+xZlaRwoeYUxEqBO+Zj4msE5G+BhGBll9lYEKv9Hfks52PAF2X7qDYXQA==", - "dev": true, - "dependencies": { - "esbuild": "^0.18.10", - "postcss": "^8.4.27", - "rollup": "^3.27.1" + "@esbuild/aix-ppc64": "0.21.5", + "@esbuild/android-arm": 
"0.21.5", + "@esbuild/android-arm64": "0.21.5", + "@esbuild/android-x64": "0.21.5", + "@esbuild/darwin-arm64": "0.21.5", + "@esbuild/darwin-x64": "0.21.5", + "@esbuild/freebsd-arm64": "0.21.5", + "@esbuild/freebsd-x64": "0.21.5", + "@esbuild/linux-arm": "0.21.5", + "@esbuild/linux-arm64": "0.21.5", + "@esbuild/linux-ia32": "0.21.5", + "@esbuild/linux-loong64": "0.21.5", + "@esbuild/linux-mips64el": "0.21.5", + "@esbuild/linux-ppc64": "0.21.5", + "@esbuild/linux-riscv64": "0.21.5", + "@esbuild/linux-s390x": "0.21.5", + "@esbuild/linux-x64": "0.21.5", + "@esbuild/netbsd-x64": "0.21.5", + "@esbuild/openbsd-x64": "0.21.5", + "@esbuild/sunos-x64": "0.21.5", + "@esbuild/win32-arm64": "0.21.5", + "@esbuild/win32-ia32": "0.21.5", + "@esbuild/win32-x64": "0.21.5" + } + }, + "node_modules/vite/node_modules/rollup": { + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.28.1.tgz", + "integrity": "sha512-61fXYl/qNVinKmGSTHAZ6Yy8I3YIJC/r2m9feHo6SwVAVcLT5MPwOUFe7EuURA/4m0NR8lXG4BBXuo/IZEsjMg==", + "dependencies": { + "@types/estree": "1.0.6" }, "bin": { - "vite": "bin/vite.js" + "rollup": "dist/bin/rollup" }, "engines": { - "node": "^14.18.0 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/vitejs/vite?sponsor=1" + "node": ">=18.0.0", + "npm": ">=8.0.0" }, "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.28.1", + "@rollup/rollup-android-arm64": "4.28.1", + "@rollup/rollup-darwin-arm64": "4.28.1", + "@rollup/rollup-darwin-x64": "4.28.1", + "@rollup/rollup-freebsd-arm64": "4.28.1", + "@rollup/rollup-freebsd-x64": "4.28.1", + "@rollup/rollup-linux-arm-gnueabihf": "4.28.1", + "@rollup/rollup-linux-arm-musleabihf": "4.28.1", + "@rollup/rollup-linux-arm64-gnu": "4.28.1", + "@rollup/rollup-linux-arm64-musl": "4.28.1", + "@rollup/rollup-linux-loongarch64-gnu": "4.28.1", + "@rollup/rollup-linux-powerpc64le-gnu": "4.28.1", + "@rollup/rollup-linux-riscv64-gnu": "4.28.1", + "@rollup/rollup-linux-s390x-gnu": "4.28.1", + 
"@rollup/rollup-linux-x64-gnu": "4.28.1", + "@rollup/rollup-linux-x64-musl": "4.28.1", + "@rollup/rollup-win32-arm64-msvc": "4.28.1", + "@rollup/rollup-win32-ia32-msvc": "4.28.1", + "@rollup/rollup-win32-x64-msvc": "4.28.1", "fsevents": "~2.3.2" + } + }, + "node_modules/vitest": { + "version": "2.1.9", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-2.1.9.tgz", + "integrity": "sha512-MSmPM9REYqDGBI8439mA4mWhV5sKmDlBKWIYbA3lRb2PTHACE0mgKwA8yQ2xq9vxDTuk4iPrECBAEW2aoFXY0Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/expect": "2.1.9", + "@vitest/mocker": "2.1.9", + "@vitest/pretty-format": "^2.1.9", + "@vitest/runner": "2.1.9", + "@vitest/snapshot": "2.1.9", + "@vitest/spy": "2.1.9", + "@vitest/utils": "2.1.9", + "chai": "^5.1.2", + "debug": "^4.3.7", + "expect-type": "^1.1.0", + "magic-string": "^0.30.12", + "pathe": "^1.1.2", + "std-env": "^3.8.0", + "tinybench": "^2.9.0", + "tinyexec": "^0.3.1", + "tinypool": "^1.0.1", + "tinyrainbow": "^1.2.0", + "vite": "^5.0.0", + "vite-node": "2.1.9", + "why-is-node-running": "^2.3.0" + }, + "bin": { + "vitest": "vitest.mjs" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" }, "peerDependencies": { - "@types/node": ">= 14", - "less": "*", - "lightningcss": "^1.21.0", - "sass": "*", - "stylus": "*", - "sugarss": "*", - "terser": "^5.4.0" + "@edge-runtime/vm": "*", + "@types/node": "^18.0.0 || >=20.0.0", + "@vitest/browser": "2.1.9", + "@vitest/ui": "2.1.9", + "happy-dom": "*", + "jsdom": "*" }, "peerDependenciesMeta": { - "@types/node": { - "optional": true - }, - "less": { + "@edge-runtime/vm": { "optional": true }, - "lightningcss": { + "@types/node": { "optional": true }, - "sass": { + "@vitest/browser": { "optional": true }, - "stylus": { + "@vitest/ui": { "optional": true }, - "sugarss": { + "happy-dom": { "optional": true }, - "terser": { + "jsdom": { "optional": true } } }, "node_modules/vitest/node_modules/vite-node": { - 
"version": "0.31.4", - "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-0.31.4.tgz", - "integrity": "sha512-uzL377GjJtTbuc5KQxVbDu2xfU/x0wVjUtXQR2ihS21q/NK6ROr4oG0rsSkBBddZUVCwzfx22in76/0ZZHXgkQ==", + "version": "2.1.9", + "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-2.1.9.tgz", + "integrity": "sha512-AM9aQ/IPrW/6ENLQg3AGY4K1N2TGZdR5e4gu/MmmR2xR3Ll1+dib+nook92g4TV3PXVyeyxdWwtaCAiUL0hMxA==", "dev": true, + "license": "MIT", "dependencies": { "cac": "^6.7.14", - "debug": "^4.3.4", - "mlly": "^1.2.0", - "pathe": "^1.1.0", - "picocolors": "^1.0.0", - "vite": "^3.0.0 || ^4.0.0" + "debug": "^4.3.7", + "es-module-lexer": "^1.5.4", + "pathe": "^1.1.2", + "vite": "^5.0.0" }, "bin": { "vite-node": "vite-node.mjs" }, "engines": { - "node": ">=v14.18.0" + "node": "^18.0.0 || >=20.0.0" }, "funding": { "url": "https://opencollective.com/vitest" } }, + "node_modules/vm-browserify": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vm-browserify/-/vm-browserify-1.1.2.tgz", + "integrity": "sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ==", + "dev": true + }, + "node_modules/vscode-uri": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/vscode-uri/-/vscode-uri-3.1.0.tgz", + "integrity": "sha512-/BpdSx+yCQGnCvecbyXdxHDkuk55/G3xwnC0GqY4gmQ3j+A+g8kzzgB4Nk/SINjqn6+waqw3EgbVF2QKExkRxQ==", + "dev": true, + "license": "MIT" + }, "node_modules/vue": { - "version": "3.4.5", - "resolved": "https://registry.npmjs.org/vue/-/vue-3.4.5.tgz", - "integrity": "sha512-VH6nHFhLPjgu2oh5vEBXoNZxsGHuZNr3qf4PHClwJWw6IDqw6B3x+4J+ABdoZ0aJuT8Zi0zf3GpGlLQCrGWHrw==", + "version": "3.5.13", + "resolved": "https://registry.npmjs.org/vue/-/vue-3.5.13.tgz", + "integrity": "sha512-wmeiSMxkZCSc+PM2w2VRsOYAZC8GdipNFRTsLSfodVqI9mbejKeXEGr8SckuLnrQPGe3oJN5c3K0vpoU9q/wCQ==", "dependencies": { - "@vue/compiler-dom": "3.4.5", - "@vue/compiler-sfc": "3.4.5", - "@vue/runtime-dom": "3.4.5", - "@vue/server-renderer": 
"3.4.5", - "@vue/shared": "3.4.5" + "@vue/compiler-dom": "3.5.13", + "@vue/compiler-sfc": "3.5.13", + "@vue/runtime-dom": "3.5.13", + "@vue/server-renderer": "3.5.13", + "@vue/shared": "3.5.13" }, "peerDependencies": { "typescript": "*" @@ -9672,148 +11889,164 @@ } }, "node_modules/vue-component-type-helpers": { - "version": "1.8.25", - "resolved": "https://registry.npmjs.org/vue-component-type-helpers/-/vue-component-type-helpers-1.8.25.tgz", - "integrity": "sha512-NCA6sekiJIMnMs4DdORxATXD+/NRkQpS32UC+I1KQJUasx+Z7MZUb3Y+MsKsFmX+PgyTYSteb73JW77AibaCCw==", - "dev": true + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/vue-component-type-helpers/-/vue-component-type-helpers-3.2.2.tgz", + "integrity": "sha512-x8C2nx5XlUNM0WirgfTkHjJGO/ABBxlANZDtHw2HclHtQnn+RFPTnbjMJn8jHZW4TlUam0asHcA14lf1C6Jb+A==", + "dev": true, + "license": "MIT" }, "node_modules/vue-eslint-parser": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/vue-eslint-parser/-/vue-eslint-parser-9.4.0.tgz", - "integrity": "sha512-7KsNBb6gHFA75BtneJsoK/dbZ281whUIwFYdQxA68QrCrGMXYzUMbPDHGcOQ0OocIVKrWSKWXZ4mL7tonCXoUw==", + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/vue-eslint-parser/-/vue-eslint-parser-10.2.0.tgz", + "integrity": "sha512-CydUvFOQKD928UzZhTp4pr2vWz1L+H99t7Pkln2QSPdvmURT0MoC4wUccfCnuEaihNsu9aYYyk+bep8rlfkUXw==", + "dev": true, + "license": "MIT", "dependencies": { - "debug": "^4.3.4", - "eslint-scope": "^7.1.1", - "eslint-visitor-keys": "^3.3.0", - "espree": "^9.3.1", - "esquery": "^1.4.0", - "lodash": "^4.17.21", - "semver": "^7.3.6" + "debug": "^4.4.0", + "eslint-scope": "^8.2.0", + "eslint-visitor-keys": "^4.2.0", + "espree": "^10.3.0", + "esquery": "^1.6.0", + "semver": "^7.6.3" }, "engines": { - "node": "^14.17.0 || >=16.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { "url": "https://github.com/sponsors/mysticatea" }, "peerDependencies": { - "eslint": ">=6.0.0" + "eslint": "^8.57.0 || ^9.0.0" } }, 
"node_modules/vue-eslint-parser/node_modules/eslint-scope": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.0.tgz", - "integrity": "sha512-DYj5deGlHBfMt15J7rdtyKNq/Nqlv5KfU4iodrQ019XESsRnwXH9KAE0y3cwtUHDo2ob7CypAnCqefh6vioWRw==", + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.4.0.tgz", + "integrity": "sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==", + "dev": true, + "license": "BSD-2-Clause", "dependencies": { "esrecurse": "^4.3.0", "estraverse": "^5.2.0" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/vue-eslint-parser/node_modules/eslint-visitor-keys": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { "url": "https://opencollective.com/eslint" } }, "node_modules/vue-gtag": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/vue-gtag/-/vue-gtag-2.0.1.tgz", - "integrity": "sha512-aM4A58FVL0wV2ptYi+xzAjeg+pQVRyUcfBc5UkXAwQrR4t3WBhor50Izp2I+3Oo7+l+vWJ7u78DGcNzReb8S/A==", + "version": "3.6.3", + "resolved": "https://registry.npmjs.org/vue-gtag/-/vue-gtag-3.6.3.tgz", + "integrity": "sha512-IuSPUjHG63BVqUEpV1xHeKQd/jVe2tQVdrwyoy8flxEem0wLRoEWmxmD+1I+zjezvxQbt1NsGhf7zCwRrxbOTQ==", + "license": "MIT", "peerDependencies": { - "vue": "^3.0.0" + "vue": "^3.5.13", + "vue-router": "^4.5.0" + }, + "peerDependenciesMeta": { + "vue-router": { + "optional": true + } } }, "node_modules/vue-router": { - "version": "4.2.5", - "resolved": 
"https://registry.npmjs.org/vue-router/-/vue-router-4.2.5.tgz", - "integrity": "sha512-DIUpKcyg4+PTQKfFPX88UWhlagBEBEfJ5A8XDXRJLUnZOvcpMF8o/dnL90vpVkGaPbjvXazV/rC1qBKrZlFugw==", + "version": "4.6.4", + "resolved": "https://registry.npmjs.org/vue-router/-/vue-router-4.6.4.tgz", + "integrity": "sha512-Hz9q5sa33Yhduglwz6g9skT8OBPii+4bFn88w6J+J4MfEo4KRRpmiNG/hHHkdbRFlLBOqxN8y8gf2Fb0MTUgVg==", + "license": "MIT", "dependencies": { - "@vue/devtools-api": "^6.5.0" + "@vue/devtools-api": "^6.6.4" }, "funding": { "url": "https://github.com/sponsors/posva" }, "peerDependencies": { - "vue": "^3.2.0" + "vue": "^3.5.0" } }, "node_modules/vue-stripe-js": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/vue-stripe-js/-/vue-stripe-js-1.0.1.tgz", - "integrity": "sha512-f2u+YkN42uHrUZbYiiqEQ+DtCyPQgJ/BdzU35EwhFqJghS6k2n8tw/VhYm35TjLsxJE+tYO7/pBUdTS+Xu5Rrw==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/vue-stripe-js/-/vue-stripe-js-1.0.4.tgz", + "integrity": "sha512-TDfbucH1tnKug6VmZvi4xTIB41CN1CH3hC/GfPT/JWraZ4THFQUXk03R46+KY1e5P4fqr0vUzonZhZAUZHRrxg==", "dev": true }, - "node_modules/vue-template-compiler": { - "version": "2.7.16", - "resolved": "https://registry.npmjs.org/vue-template-compiler/-/vue-template-compiler-2.7.16.tgz", - "integrity": "sha512-AYbUWAJHLGGQM7+cNTELw+KsOG9nl2CnSv467WobS5Cv9uk3wFcnr1Etsz2sEIHEZvw1U+o9mRlEO6QbZvUPGQ==", - "dev": true, - "dependencies": { - "de-indent": "^1.0.2", - "he": "^1.2.0" - } - }, "node_modules/vue-tsc": { - "version": "1.8.27", - "resolved": "https://registry.npmjs.org/vue-tsc/-/vue-tsc-1.8.27.tgz", - "integrity": "sha512-WesKCAZCRAbmmhuGl3+VrdWItEvfoFIPXOvUJkjULi+x+6G/Dy69yO3TBRJDr9eUlmsNAwVmxsNZxvHKzbkKdg==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/vue-tsc/-/vue-tsc-3.2.4.tgz", + "integrity": "sha512-xj3YCvSLNDKt1iF9OcImWHhmYcihVu9p4b9s4PGR/qp6yhW+tZJaypGxHScRyOrdnHvaOeF+YkZOdKwbgGvp5g==", "dev": true, + "license": "MIT", "dependencies": { - "@volar/typescript": 
"~1.11.1", - "@vue/language-core": "1.8.27", - "semver": "^7.5.4" + "@volar/typescript": "2.4.27", + "@vue/language-core": "3.2.4" }, "bin": { "vue-tsc": "bin/vue-tsc.js" }, "peerDependencies": { - "typescript": "*" + "typescript": ">=5.0.0" } }, "node_modules/vue/node_modules/@vue/reactivity": { - "version": "3.4.5", - "resolved": "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.4.5.tgz", - "integrity": "sha512-BcWkKvjdvqJwb7BhhFkXPLDCecX4d4a6GATvCduJQDLv21PkPowAE5GKuIE5p6RC07/Lp9FMkkq4AYCTVF5KlQ==", + "version": "3.5.13", + "resolved": "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.5.13.tgz", + "integrity": "sha512-NaCwtw8o48B9I6L1zl2p41OHo/2Z4wqYGGIK1Khu5T7yxrn+ATOixn/Udn2m+6kZKB/J7cuT9DbWWhRxqixACg==", + "license": "MIT", "dependencies": { - "@vue/shared": "3.4.5" + "@vue/shared": "3.5.13" } }, "node_modules/vue/node_modules/@vue/runtime-core": { - "version": "3.4.5", - "resolved": "https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.4.5.tgz", - "integrity": "sha512-wh9ELIOQKeWT9SaUPdLrsxRkZv14jp+SJm9aiQGWio+/MWNM3Lib0wE6CoKEqQ9+SCYyGjDBhTOTtO47kCgbkg==", + "version": "3.5.13", + "resolved": "https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.5.13.tgz", + "integrity": "sha512-Fj4YRQ3Az0WTZw1sFe+QDb0aXCerigEpw418pw1HBUKFtnQHWzwojaukAs2X/c9DQz4MQ4bsXTGlcpGxU/RCIw==", + "license": "MIT", "dependencies": { - "@vue/reactivity": "3.4.5", - "@vue/shared": "3.4.5" + "@vue/reactivity": "3.5.13", + "@vue/shared": "3.5.13" } }, "node_modules/vue/node_modules/@vue/runtime-dom": { - "version": "3.4.5", - "resolved": "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.4.5.tgz", - "integrity": "sha512-n5ewvOjyG3IEpqGBahdPXODFSpVlSz3H4LF76Sx0XAqpIOqyJ5bIb2PrdYuH2ogBMAQPh+o5tnoH4nJpBr8U0Q==", - "dependencies": { - "@vue/runtime-core": "3.4.5", - "@vue/shared": "3.4.5", + "version": "3.5.13", + "resolved": "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.5.13.tgz", + "integrity": 
"sha512-dLaj94s93NYLqjLiyFzVs9X6dWhTdAlEAciC3Moq7gzAc13VJUdCnjjRurNM6uTLFATRHexHCTu/Xp3eW6yoog==", + "license": "MIT", + "dependencies": { + "@vue/reactivity": "3.5.13", + "@vue/runtime-core": "3.5.13", + "@vue/shared": "3.5.13", "csstype": "^3.1.3" } }, "node_modules/vuetify": { - "version": "3.4.10", - "resolved": "https://registry.npmjs.org/vuetify/-/vuetify-3.4.10.tgz", - "integrity": "sha512-tObGoumCJxuK26OpS/CLZZIJAgDtN2cnd31vJscVhpuf6jeMD7wh8IsgfZownAOXU1FcKWVQwn1RSDsoXk5cJA==", - "engines": { - "node": "^12.20 || >=14.13" - }, + "version": "3.11.7", + "resolved": "https://registry.npmjs.org/vuetify/-/vuetify-3.11.7.tgz", + "integrity": "sha512-3nK1mKTXQRbU4QXukV4WIbs5YZgMK19flHpFq3pU+6Fpa5YLB8RyyK1BLWAW8JmhSVcaqVUcB/EJ3oJ8g3XNCw==", + "license": "MIT", "funding": { "type": "github", "url": "https://github.com/sponsors/johnleider" }, "peerDependencies": { "typescript": ">=4.7", - "vite-plugin-vuetify": ">=1.0.0-alpha.12", - "vue": "^3.3.0", - "vue-i18n": "^9.0.0", - "webpack-plugin-vuetify": ">=2.0.0-alpha.11" + "vite-plugin-vuetify": ">=2.1.0", + "vue": "^3.5.0", + "webpack-plugin-vuetify": ">=3.1.0" }, "peerDependenciesMeta": { "typescript": { @@ -9822,41 +12055,36 @@ "vite-plugin-vuetify": { "optional": true }, - "vue-i18n": { - "optional": true - }, "webpack-plugin-vuetify": { "optional": true } } }, - "node_modules/vuex": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/vuex/-/vuex-4.1.0.tgz", - "integrity": "sha512-hmV6UerDrPcgbSy9ORAtNXDr9M4wlNP4pEFKye4ujJF8oqgFFuxDCdOLS3eNoRTtq5O3hoBDh9Doj1bQMYHRbQ==", + "node_modules/w3c-xmlserializer": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-5.0.0.tgz", + "integrity": "sha512-o8qghlI8NZHU1lLPrpi2+Uq7abh4GGPpYANlalzWxyWteJOCsr/P+oPBA49TOLu5FTZO4d3F9MnWJfiMo4BkmA==", + "dev": true, "dependencies": { - "@vue/devtools-api": "^6.0.0-beta.11" + "xml-name-validator": "^5.0.0" }, - "peerDependencies": { - "vue": "^3.2.0" + "engines": { + 
"node": ">=18" } }, - "node_modules/w3c-xmlserializer": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-4.0.0.tgz", - "integrity": "sha512-d+BFHzbiCx6zGfz0HyQ6Rg69w9k19nviJspaj4yNscGjrHu94sVP+aRm75yEbCh+r2/yR+7q6hux9LVtbuTGBw==", + "node_modules/w3c-xmlserializer/node_modules/xml-name-validator": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-5.0.0.tgz", + "integrity": "sha512-EvGK8EJ3DhaHfbRlETOWAS5pO9MZITeauHKJyb8wyajUfQUenkIg2MvLDTZ4T/TgIcm3HU0TFBgWWboAZ30UHg==", "dev": true, - "dependencies": { - "xml-name-validator": "^4.0.0" - }, "engines": { - "node": ">=14" + "node": ">=18" } }, "node_modules/watchpack": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.0.tgz", - "integrity": "sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg==", + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.2.tgz", + "integrity": "sha512-TnbFSbcOCcDgjZ4piURLCbJ3nJhznVh9kw6F6iokjiFPl8ONxe9A6nMDVXDiNbrSfLILs6vB07F7wLBrwPYzJw==", "dev": true, "peer": true, "dependencies": { @@ -9891,35 +12119,34 @@ } }, "node_modules/webpack": { - "version": "5.83.1", - "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.83.1.tgz", - "integrity": "sha512-TNsG9jDScbNuB+Lb/3+vYolPplCS3bbEaJf+Bj0Gw4DhP3ioAflBb1flcRt9zsWITyvOhM96wMQNRWlSX52DgA==", + "version": "5.94.0", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.94.0.tgz", + "integrity": "sha512-KcsGn50VT+06JH/iunZJedYGUJS5FGjow8wb9c0v5n1Om8O1g4L6LjtfxwlXIATopoQu+vOXXa7gYisWxCoPyg==", "dev": true, "peer": true, "dependencies": { - "@types/eslint-scope": "^3.7.3", - "@types/estree": "^1.0.0", - "@webassemblyjs/ast": "^1.11.5", - "@webassemblyjs/wasm-edit": "^1.11.5", - "@webassemblyjs/wasm-parser": "^1.11.5", + "@types/estree": "^1.0.5", + "@webassemblyjs/ast": "^1.12.1", + 
"@webassemblyjs/wasm-edit": "^1.12.1", + "@webassemblyjs/wasm-parser": "^1.12.1", "acorn": "^8.7.1", - "acorn-import-assertions": "^1.7.6", - "browserslist": "^4.14.5", + "acorn-import-attributes": "^1.9.5", + "browserslist": "^4.21.10", "chrome-trace-event": "^1.0.2", - "enhanced-resolve": "^5.14.0", + "enhanced-resolve": "^5.17.1", "es-module-lexer": "^1.2.1", "eslint-scope": "5.1.1", "events": "^3.2.0", "glob-to-regexp": "^0.4.1", - "graceful-fs": "^4.2.9", + "graceful-fs": "^4.2.11", "json-parse-even-better-errors": "^2.3.1", "loader-runner": "^4.2.0", "mime-types": "^2.1.27", "neo-async": "^2.6.2", - "schema-utils": "^3.1.2", + "schema-utils": "^3.2.0", "tapable": "^2.1.1", - "terser-webpack-plugin": "^5.3.7", - "watchpack": "^2.4.0", + "terser-webpack-plugin": "^5.3.10", + "watchpack": "^2.4.1", "webpack-sources": "^3.2.3" }, "bin": { @@ -9942,34 +12169,27 @@ "version": "3.2.3", "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz", "integrity": "sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==", + "dev": true, + "peer": true, "engines": { "node": ">=10.13.0" } }, "node_modules/webpack-virtual-modules": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/webpack-virtual-modules/-/webpack-virtual-modules-0.6.1.tgz", - "integrity": "sha512-poXpCylU7ExuvZK8z+On3kX+S8o/2dQ/SVYueKA0D4WEMXROXgY8Ez50/bQEUmvoSMMrWcrJqCHuhAbsiwg7Dg==" - }, - "node_modules/well-known-symbols": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/well-known-symbols/-/well-known-symbols-2.0.0.tgz", - "integrity": "sha512-ZMjC3ho+KXo0BfJb7JgtQ5IBuvnShdlACNkKkdsqBmYw3bPAaJfPeYUo6tLUaT5tG/Gkh7xkpBhKRQ9e7pyg9Q==", - "dev": true, - "engines": { - "node": ">=6" - } + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/webpack-virtual-modules/-/webpack-virtual-modules-0.6.2.tgz", + "integrity": 
"sha512-66/V2i5hQanC51vBQKPH4aI8NMAcBW59FVBs+rC7eGHupMyfn34q7rZIE+ETlJ+XTevqfUhVVBgSUNSW2flEUQ==" }, "node_modules/whatwg-encoding": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-2.0.0.tgz", - "integrity": "sha512-p41ogyeMUrw3jWclHWTQg1k05DSVXPLcVxRTYsXUk+ZooOCZLcoYgPZ/HL/D/N+uQPOtcp1me1WhBEaX02mhWg==", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-3.1.1.tgz", + "integrity": "sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==", "dev": true, "dependencies": { "iconv-lite": "0.6.3" }, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/whatwg-encoding/node_modules/iconv-lite": { @@ -9984,37 +12204,33 @@ "node": ">=0.10.0" } }, - "node_modules/whatwg-fetch": { - "version": "0.11.1", - "resolved": "https://registry.npmjs.org/whatwg-fetch/-/whatwg-fetch-0.11.1.tgz", - "integrity": "sha512-e6bY9RSVKv3I6IYlqKyVXEOZ5ZDebjPKcShV44g2vM6a7LyQpp7jo6kaDdH/wyWaM/PMQA40qV2OIYI8b2dQ0Q==" - }, "node_modules/whatwg-mimetype": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-3.0.0.tgz", - "integrity": "sha512-nt+N2dzIutVRxARx1nghPKGv1xHikU7HKdfafKkLNLindmPU/ch3U31NOCGGA/dmPcmb1VlofO0vnKAcsm0o/Q==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-4.0.0.tgz", + "integrity": "sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==", "dev": true, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/whatwg-url": { - "version": "12.0.1", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-12.0.1.tgz", - "integrity": "sha512-Ed/LrqB8EPlGxjS+TrsXcpUond1mhccS3pchLhzSgPCnTimUCKj3IZE75pAs5m6heB2U2TMerKFUXheyHY+VDQ==", + "version": "14.2.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-14.2.0.tgz", + "integrity": 
"sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==", "dev": true, "dependencies": { - "tr46": "^4.1.1", + "tr46": "^5.1.0", "webidl-conversions": "^7.0.0" }, "engines": { - "node": ">=14" + "node": ">=18" } }, "node_modules/which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, "dependencies": { "isexe": "^2.0.0" }, @@ -10026,45 +12242,85 @@ } }, "node_modules/which-boxed-primitive": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", - "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.1.1.tgz", + "integrity": "sha512-TbX3mj8n0odCBFVlY8AxkqcHASw3L60jIuF8jFP78az3C2YhmGvqbHBpAjTRH2/xqYunrJ9g1jSyjCjpoWzIAA==", + "dev": true, + "license": "MIT", "dependencies": { - "is-bigint": "^1.0.1", - "is-boolean-object": "^1.1.0", - "is-number-object": "^1.0.4", - "is-string": "^1.0.5", - "is-symbol": "^1.0.3" + "is-bigint": "^1.1.0", + "is-boolean-object": "^1.2.1", + "is-number-object": "^1.1.1", + "is-string": "^1.1.1", + "is-symbol": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-builtin-type": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/which-builtin-type/-/which-builtin-type-1.2.1.tgz", + "integrity": "sha512-6iBczoX+kDQ7a3+YJBnh3T+KZRxM/iYNPXicqk66/Qfm1b93iu+yOImkg0zHbj5LNOcNv1TEADiZ0xa34B4q6Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "function.prototype.name": "^1.1.6", + "has-tostringtag": "^1.0.2", + "is-async-function": "^2.0.0", + "is-date-object": 
"^1.1.0", + "is-finalizationregistry": "^1.1.0", + "is-generator-function": "^1.0.10", + "is-regex": "^1.2.1", + "is-weakref": "^1.0.2", + "isarray": "^2.0.5", + "which-boxed-primitive": "^1.1.0", + "which-collection": "^1.0.2", + "which-typed-array": "^1.1.16" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/which-collection": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.1.tgz", - "integrity": "sha512-W8xeTUwaln8i3K/cY1nGXzdnVZlidBcagyNFtBdD5kxnb4TvGKR7FfSIS3mYpwWS1QUCutfKz8IY8RjftB0+1A==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.2.tgz", + "integrity": "sha512-K4jVyjnBdgvc86Y6BkaLZEN933SwYOuBFkdmBu9ZfkcAbdVbpITnDmjvZ/aQjRXQrv5EPkTnD1s39GiiqbngCw==", + "dev": true, + "license": "MIT", "dependencies": { - "is-map": "^2.0.1", - "is-set": "^2.0.1", - "is-weakmap": "^2.0.1", - "is-weakset": "^2.0.1" + "is-map": "^2.0.3", + "is-set": "^2.0.3", + "is-weakmap": "^2.0.2", + "is-weakset": "^2.0.3" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/which-typed-array": { - "version": "1.1.9", - "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.9.tgz", - "integrity": "sha512-w9c4xkx6mPidwp7180ckYWfMmvxpjlZuIudNtDf4N/tTAUB8VJbX25qZoAsrtGuYNnGw3pa0AXgbGKRB8/EceA==", + "version": "1.1.19", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.19.tgz", + "integrity": "sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==", + "license": "MIT", "dependencies": { - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", - "for-each": "^0.3.3", - "gopd": "^1.0.1", - "has-tostringtag": "^1.0.0", - "is-typed-array": "^1.1.10" + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + 
"for-each": "^0.3.5", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-tostringtag": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -10074,9 +12330,9 @@ } }, "node_modules/why-is-node-running": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.2.2.tgz", - "integrity": "sha512-6tSwToZxTOcotxHeA+qGCq1mVzKR3CwcJGmVcY+QE8SHy6TnpFnh8PAvPNHYr7EcuVeG0QSMxtYCuO1ta/G/oA==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.3.0.tgz", + "integrity": "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==", "dev": true, "dependencies": { "siginfo": "^2.0.0", @@ -10090,14 +12346,16 @@ } }, "node_modules/word-wrap": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.4.tgz", - "integrity": "sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==", + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, "engines": { "node": ">=0.10.0" } }, - "node_modules/wrap-ansi": { + "node_modules/wrap-ansi-cjs": { + "name": "wrap-ansi", "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", @@ -10117,12 +12375,13 @@ "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true }, "node_modules/ws": { - "version": "8.13.0", - "resolved": 
"https://registry.npmjs.org/ws/-/ws-8.13.0.tgz", - "integrity": "sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA==", + "version": "8.18.1", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.1.tgz", + "integrity": "sha512-RKW2aJZMXeMxVpnZ6bck+RswznaxmzdULiBr6KY7XkTnW8uvt0iT9H5DkHUChXrc+uurzwa0rVI16n/Xzjdz1w==", "dev": true, "engines": { "node": ">=10.0.0" @@ -10144,6 +12403,7 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-4.0.0.tgz", "integrity": "sha512-ICP2e+jsHvAj2E2lIHxa5tjXRlKDJo4IdvPvCXbXQGdzSfmSpNVyIKMvoZHjDY9DP0zV17iI85o90vRFXNccRw==", + "dev": true, "engines": { "node": ">=12" } @@ -10154,67 +12414,25 @@ "integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==", "dev": true }, - "node_modules/xterm": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/xterm/-/xterm-5.3.0.tgz", - "integrity": "sha512-8QqjlekLUFTrU6x7xck1MsPzPA571K5zNqWm0M0oroYEWVOptZ0+ubQSkQ3uxIEhcIHRujJy6emDWX4A7qyFzg==" - }, - "node_modules/xterm-addon-attach": { - "version": "0.8.0", - "resolved": "https://registry.npmjs.org/xterm-addon-attach/-/xterm-addon-attach-0.8.0.tgz", - "integrity": "sha512-k8N5boSYn6rMJTTNCgFpiSTZ26qnYJf3v/nJJYexNO2sdAHDN3m1ivVQWVZ8CHJKKnZQw1rc44YP2NtgalWHfQ==", - "peerDependencies": { - "xterm": "^5.0.0" - } - }, - "node_modules/xterm-addon-fit": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/xterm-addon-fit/-/xterm-addon-fit-0.7.0.tgz", - "integrity": "sha512-tQgHGoHqRTgeROPnvmtEJywLKoC/V9eNs4bLLz7iyJr1aW/QFzRwfd3MGiJ6odJd9xEfxcW36/xRU47JkD5NKQ==", - "peerDependencies": { - "xterm": "^5.0.0" - } - }, - "node_modules/y18n": { - "version": "5.0.8", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", - "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", - "dev": true, - "engines": { - "node": ">=10" - 
} - }, - "node_modules/yargs": { - "version": "16.2.0", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", - "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", "dev": true, - "dependencies": { - "cliui": "^7.0.2", - "escalade": "^3.1.1", - "get-caller-file": "^2.0.5", - "require-directory": "^2.1.1", - "string-width": "^4.2.0", - "y18n": "^5.0.5", - "yargs-parser": "^20.2.2" - }, "engines": { - "node": ">=10" + "node": ">=0.4" } }, - "node_modules/yargs-parser": { - "version": "20.2.9", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", - "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", - "dev": true, - "engines": { - "node": ">=10" - } + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "node_modules/yocto-queue": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, "engines": { "node": ">=10" }, @@ -10223,10 +12441,11 @@ } }, "node_modules/yup": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/yup/-/yup-1.3.3.tgz", - "integrity": "sha512-v8QwZSsHH2K3/G9WSkp6mZKO+hugKT1EmnMqLNUcfu51HU9MDyhlETT/JgtzprnrnQHPWsjc6MUDMBp/l9fNnw==", + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/yup/-/yup-1.7.1.tgz", + "integrity": 
"sha512-GKHFX2nXul2/4Dtfxhozv701jLQHdf6J34YDh2cEkpqoo8le5Mg6/LrdseVLrFarmFygZTlfIhHx/QKfb/QWXw==", "dev": true, + "license": "MIT", "dependencies": { "property-expr": "^2.0.5", "tiny-case": "^1.0.3", diff --git a/ui/package.json b/ui/package.json index 5701ed7efed..cdc11424527 100644 --- a/ui/package.json +++ b/ui/package.json @@ -1,94 +1,111 @@ { "name": "ui", "version": "0.0.0", + "type": "module", "private": true, "scripts": { "serve": "vite preview --host", "build": "vue-tsc --noEmit && vite build", "dev": "vite --host", "preview": "vite preview", - "lint": "eslint --ext .ts,.vue --ignore-path .gitignore --no-fix src", - "lint-fix": "eslint --ext .ts,.vue --ignore-path .gitignore --fix src", - "lint-fix:test": "eslint --ext .ts,.vue --ignore-path .gitignore --fix tests", + "lint": "eslint --no-fix .", + "lint-fix": "eslint --fix .", "test": "TZ=UTC vitest", "test-ui": "TZ=UTC vitest --ui", "test-coverage": "vitest run --coverage" }, "dependencies": { "@esbuild-plugins/node-globals-polyfill": "^0.2.3", - "@fortawesome/fontawesome-free": "^6.4.0", - "@fortawesome/fontawesome-svg-core": "^6.4.0", - "@fortawesome/free-brands-svg-icons": "^6.4.0", - "@fortawesome/free-regular-svg-icons": "^6.4.0", - "@fortawesome/free-solid-svg-icons": "^6.4.0", - "@mdi/font": "7.2.96", + "@fontsource/anonymous-pro": "^5.2.8", + "@fontsource/fira-code": "^5.2.7", + "@fontsource/inconsolata": "^5.2.8", + "@fontsource/jetbrains-mono": "^5.2.8", + "@fontsource/noto-mono": "^5.2.5", + "@fontsource/source-code-pro": "^5.2.7", + "@fontsource/ubuntu-mono": "^5.2.8", + "@fortawesome/fontawesome-free": "^7.0.0", + "@fortawesome/fontawesome-svg-core": "^7.0.0", + "@fortawesome/free-brands-svg-icons": "^7.0.0", + "@fortawesome/free-regular-svg-icons": "^7.0.0", + "@fortawesome/free-solid-svg-icons": "^7.0.0", + "@mdi/font": "^7.4.47", + "@productdevbook/chatwoot": "github:shellhub-io/chatwoot#build", "@rollup/plugin-inject": "^5.0.3", "@rushstack/eslint-patch": "^1.2.0", "@sentry/tracing": 
"^7.52.1", "@sentry/vue": "^7.91.0", "@stripe/stripe-js": "^1.38.1", - "@vue/eslint-config-airbnb": "^7.0.0", - "@vue/eslint-config-typescript": "^11.0.2", + "@tinymce/tinymce-vue": "^6.1.0", "@vue/runtime-dom": "^3.2.40", - "@vueuse/core": "^8.2.0", + "@vueuse/core": "^13.0.0", + "@xterm/addon-attach": "^0.11.0", + "@xterm/addon-fit": "^0.10.0", + "@xterm/xterm": "^5.5.0", "assert": "^2.0.0", "axios": "^1.4.0", "dotenv": "^16.0.3", "express": "^4.18.1", + "file-saver": "^2.0.5", "font-logos": "^1.0.0", - "gitter-sidecar": "^1.5.0", - "markdown-it": "^13.0.1", + "markdown-it": "^14.0.0", "moment": "^2.29.4", "node-rsa": "^1.1.1", + "pinia": "^3.0.1", "qrcode.vue": "^3.4.1", - "sass": "^1.55.0", + "sass": "^1.83.0", "sshpk": "^1.17.0", - "unplugin-vue-markdown": "^0.25.2", + "turndown": "^7.2.0", + "unplugin-vue-markdown": "^29.0.0", "vee-validate": "^4.12.4", "vite-node": "^1.2.0", - "vue": "^3.4.5", - "vue-gtag": "^2.0.1", + "vue": "^3.5.13", + "vue-gtag": "^3.0.0", "vue-router": "4", "vuetify": "^3.4.9", - "vuex": "^4.0.2", "webcrypto": "^0.1.1", - "webfontloader": "^1.6.28", - "xterm": "^5.0.0", - "xterm-addon-attach": "^0.8.0", - "xterm-addon-fit": "^0.7.0" + "webfontloader": "^1.6.28" }, "devDependencies": { + "@pinia/testing": "^1.0.3", "@rollup/plugin-node-resolve": "^15.0.1", + "@stylistic/eslint-plugin": "^5.5.0", "@types/node": "^18.8.1", "@types/node-rsa": "^1.1.1", "@types/sshpk": "^1.17.0", "@types/webfontloader": "^1.6.35", - "@typescript-eslint/eslint-plugin": "^5.39.0", - "@typescript-eslint/parser": "^5.39.0", "@vitejs/plugin-vue": "^5.0.3", - "@vitest/coverage-c8": "^0.31.0", - "@vitest/ui": "^0.28.2", - "@vue/test-utils": "^2.1.0", + "@vitest/coverage-v8": "^2.1.8", + "@vitest/ui": "^2.1.8", + "@vue/eslint-config-typescript": "^14.6.0", + "@vue/test-utils": "^2.4.6", + "asciinema-player": "^3.9.0", "axios-mock-adapter": "^1.21.4", "buffer": "^6.0.3", - "eslint": "^8.24.0", - "eslint-plugin-vue": "^9.6.0", + "eslint": "^9.38.0", + 
"eslint-plugin-import": "^2.32.0", + "eslint-plugin-jest": "^29.0.1", + "eslint-plugin-prettier": "^5.5.4", + "eslint-plugin-vue": "^9.33.0", "events": "^3.3.0", - "jsdom": "^21.0.0", + "jsdom": "^26.0.0", "process": "0.11.10", "resize-observer-polyfill": "^1.5.1", "rollup-plugin-polyfill-node": "^0.12.0", "terser": "^5.15.0", "typescript": "^5.0.4", + "typescript-eslint": "^8.46.2", "util": "^0.12.4", "vi-canvas-mock": "^1.0.0", "vite": "^5.0.11", + "vite-plugin-node-polyfills": "^0.23.0", "vite-plugin-require-transform": "^1.0.21", "vite-plugin-vuetify": "^2.0.1", - "vitest": "^0.31.0", - "vue-cli-plugin-vuetify": "~2.5.8", + "vitest": "^2.1.8", + "vue-cli-plugin-vuetify": "^2.5.8", + "vue-component-type-helpers": "^3.2.2", + "vue-eslint-parser": "^10.2.0", "vue-stripe-js": "^1.0.1", - "vue-tsc": "^1.2.0", + "vue-tsc": "^3.0.0", "yup": "^1.0.0" } } diff --git a/ui/public/bg-inverted.svg b/ui/public/bg-inverted.svg new file mode 100644 index 00000000000..113a7aad284 --- /dev/null +++ b/ui/public/bg-inverted.svg @@ -0,0 +1,59 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/ui/public/bg.svg b/ui/public/bg.svg new file mode 100644 index 00000000000..45ba08929f9 --- /dev/null +++ b/ui/public/bg.svg @@ -0,0 +1,59 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/ui/public/env.js b/ui/public/env.js deleted file mode 100644 index d268b7d3ae4..00000000000 --- a/ui/public/env.js +++ /dev/null @@ -1 +0,0 @@ -{}; diff --git a/connector/.keys/.gitkeep b/ui/public/healthcheck similarity index 100% rename from connector/.keys/.gitkeep rename to ui/public/healthcheck diff --git a/ui/public/xterm-themes/atom_one_dark.json b/ui/public/xterm-themes/atom_one_dark.json new file mode 100644 index 00000000000..e68b6e5e17f --- /dev/null +++ b/ui/public/xterm-themes/atom_one_dark.json @@ -0,0 
+1,22 @@ +{ + "foreground": "#abb2bf", + "background": "#1e2127", + "cursor": "#5c6370", + "selection": "#abb2bf80", + "black": "#000000", + "red": "#ca6169", + "green": "#82a568", + "yellow": "#bf8c5d", + "blue": "#56a2e1", + "magenta": "#b76ccd", + "cyan": "#4e9aa3", + "white": "#c5cbd6", + "brightBlack": "#5c6370", + "brightRed": "#e77c84", + "brightGreen": "#b4e294", + "brightYellow": "#e9b17b", + "brightBlue": "#7ec5ff", + "brightMagenta": "#db8df2", + "brightCyan": "#64cfdd", + "brightWhite": "#ffffff" +} \ No newline at end of file diff --git a/ui/public/xterm-themes/atom_one_light.json b/ui/public/xterm-themes/atom_one_light.json new file mode 100644 index 00000000000..c82eefba8f3 --- /dev/null +++ b/ui/public/xterm-themes/atom_one_light.json @@ -0,0 +1,22 @@ +{ + "foreground": "#383a42", + "background": "#f9f9f9", + "cursor": "#383a42", + "selection": "#383a4280", + "black": "#000000", + "red": "#e45649", + "green": "#4c9b4b", + "yellow": "#c99525", + "blue": "#4078f2", + "magenta": "#a626a4", + "cyan": "#0184bc", + "white": "#b8b9bf", + "brightBlack": "#474747", + "brightRed": "#ff7468", + "brightGreen": "#74ca72", + "brightYellow": "#dba633", + "brightBlue": "#6a99ff", + "brightMagenta": "#c142bf", + "brightCyan": "#00b1fd", + "brightWhite": "#ffffff" +} \ No newline at end of file diff --git a/ui/public/xterm-themes/basic.json b/ui/public/xterm-themes/basic.json new file mode 100644 index 00000000000..382579a3b88 --- /dev/null +++ b/ui/public/xterm-themes/basic.json @@ -0,0 +1,22 @@ +{ + "foreground": "#000000", + "background": "#ffffff", + "cursor": "#7f7f7f", + "selection": "#00000080", + "black": "#2e2e2e", + "red": "#c61a1a", + "green": "#007900", + "yellow": "#999900", + "blue": "#0f48cd", + "magenta": "#b200b2", + "cyan": "#3fc1dd", + "white": "#acacac", + "brightBlack": "#757575", + "brightRed": "#ff3e3e", + "brightGreen": "#00b300", + "brightYellow": "#d4d400", + "brightBlue": "#316fff", + "brightMagenta": "#ff60c9", + "brightCyan": "#6ce5ff", + 
"brightWhite": "#cac5c5" +} \ No newline at end of file diff --git a/ui/public/xterm-themes/dracula.json b/ui/public/xterm-themes/dracula.json new file mode 100644 index 00000000000..726b73fced0 --- /dev/null +++ b/ui/public/xterm-themes/dracula.json @@ -0,0 +1,22 @@ +{ + "foreground": "#f8f8f2", + "background": "#282a36", + "cursor": "#bbbbbb", + "selection": "#f8f8f280", + "black": "#000000", + "red": "#e04242", + "green": "#45e16c", + "yellow": "#e3ec7d", + "blue": "#9b7dc6", + "magenta": "#e469b0", + "cyan": "#8be9fd", + "white": "#cac5c5", + "brightBlack": "#4a4a4a", + "brightRed": "#ff5555", + "brightGreen": "#b5ffc8", + "brightYellow": "#fff9c8", + "brightBlue": "#c8a1ff", + "brightMagenta": "#ff8cce", + "brightCyan": "#c8f5ff", + "brightWhite": "#ffffff" +} \ No newline at end of file diff --git a/ui/public/xterm-themes/grass.json b/ui/public/xterm-themes/grass.json new file mode 100644 index 00000000000..07cce09e1d5 --- /dev/null +++ b/ui/public/xterm-themes/grass.json @@ -0,0 +1,22 @@ +{ + "foreground": "#fff0a5", + "background": "#13773d", + "cursor": "#8c2800", + "selection": "#fff0a580", + "black": "#000000", + "red": "#9a183c", + "green": "#6eb95e", + "yellow": "#ffa673", + "blue": "#00378a", + "magenta": "#771361", + "cyan": "#3bcbea", + "white": "#939393", + "brightBlack": "#393939", + "brightRed": "#e0692f", + "brightGreen": "#b2ffa2", + "brightYellow": "#ffc27b", + "brightBlue": "#2380c4", + "brightMagenta": "#ec88c2", + "brightCyan": "#70e4ff", + "brightWhite": "#ffffff" +} \ No newline at end of file diff --git a/ui/public/xterm-themes/gruvbox_dark.json b/ui/public/xterm-themes/gruvbox_dark.json new file mode 100644 index 00000000000..d3b5e7cd9fd --- /dev/null +++ b/ui/public/xterm-themes/gruvbox_dark.json @@ -0,0 +1,22 @@ +{ + "foreground": "#ebdbb2", + "background": "#282828", + "cursor": "#ebdbb2", + "selection": "#ebdbb280", + "black": "#151515", + "red": "#cc241d", + "green": "#98971a", + "yellow": "#d79921", + "blue": "#458588", + 
"magenta": "#b16286", + "cyan": "#689d6a", + "white": "#c3b198", + "brightBlack": "#695c50", + "brightRed": "#fb4934", + "brightGreen": "#b8bb26", + "brightYellow": "#fabd2f", + "brightBlue": "#83a598", + "brightMagenta": "#f59db5", + "brightCyan": "#8ec07c", + "brightWhite": "#ebdbb2" +} \ No newline at end of file diff --git a/ui/public/xterm-themes/gruvbox_light.json b/ui/public/xterm-themes/gruvbox_light.json new file mode 100644 index 00000000000..8904663d17f --- /dev/null +++ b/ui/public/xterm-themes/gruvbox_light.json @@ -0,0 +1,22 @@ +{ + "foreground": "#282828", + "background": "#fbf1c7", + "cursor": "#282828", + "selection": "#28282880", + "black": "#dfd6b1", + "red": "#9d0006", + "green": "#79740e", + "yellow": "#b57614", + "blue": "#076678", + "magenta": "#8f3f71", + "cyan": "#427b58", + "white": "#3c3836", + "brightBlack": "#9d8374", + "brightRed": "#cc241d", + "brightGreen": "#98971a", + "brightYellow": "#d79921", + "brightBlue": "#458588", + "brightMagenta": "#d180a5", + "brightCyan": "#689d69", + "brightWhite": "#7c6f64" +} \ No newline at end of file diff --git a/ui/public/xterm-themes/homebrew.json b/ui/public/xterm-themes/homebrew.json new file mode 100644 index 00000000000..d859737817b --- /dev/null +++ b/ui/public/xterm-themes/homebrew.json @@ -0,0 +1,22 @@ +{ + "foreground": "#00ff00", + "background": "#000000", + "cursor": "#23ff18", + "selection": "#00ff0080", + "black": "#2e2e2e", + "red": "#c93434", + "green": "#348e48", + "yellow": "#e09e00", + "blue": "#0031e0", + "magenta": "#e235ff", + "cyan": "#3fc1dd", + "white": "#d0cfcf", + "brightBlack": "#5b5b5b", + "brightRed": "#ff6767", + "brightGreen": "#31ff31", + "brightYellow": "#ffdca8", + "brightBlue": "#4465da", + "brightMagenta": "#ff5fc8", + "brightCyan": "#8debff", + "brightWhite": "#e6e6e6" +} \ No newline at end of file diff --git a/ui/public/xterm-themes/man_page.json b/ui/public/xterm-themes/man_page.json new file mode 100644 index 00000000000..116ca892c87 --- /dev/null +++ 
b/ui/public/xterm-themes/man_page.json @@ -0,0 +1,22 @@ +{ + "foreground": "#000000", + "background": "#fef49c", + "cursor": "#7f7f7f", + "selection": "#00000080", + "black": "#383838", + "red": "#9a183c", + "green": "#009100", + "yellow": "#be6600", + "blue": "#114695", + "magenta": "#b72fb9", + "cyan": "#3bcbea", + "white": "#959595", + "brightBlack": "#a7a7a7", + "brightRed": "#e0692f", + "brightGreen": "#00b400", + "brightYellow": "#ffb571", + "brightBlue": "#3392d6", + "brightMagenta": "#ec88c2", + "brightCyan": "#70e4ff", + "brightWhite": "#dadada" +} \ No newline at end of file diff --git a/ui/public/xterm-themes/material_dark.json b/ui/public/xterm-themes/material_dark.json new file mode 100644 index 00000000000..6d09aca561d --- /dev/null +++ b/ui/public/xterm-themes/material_dark.json @@ -0,0 +1,22 @@ +{ + "foreground": "#e5e5e5", + "background": "#232322", + "cursor": "#16afca", + "selection": "#e5e5e580", + "black": "#040404", + "red": "#b7141f", + "green": "#457b24", + "yellow": "#f6981e", + "blue": "#134eb2", + "magenta": "#560088", + "cyan": "#0e717c", + "white": "#efefef", + "brightBlack": "#424242", + "brightRed": "#e83b3f", + "brightGreen": "#7aba3a", + "brightYellow": "#ffea2e", + "brightBlue": "#54a4f3", + "brightMagenta": "#aa4dbc", + "brightCyan": "#26bbd1", + "brightWhite": "#d9d9d9" +} \ No newline at end of file diff --git a/ui/public/xterm-themes/material_light.json b/ui/public/xterm-themes/material_light.json new file mode 100644 index 00000000000..41dbddadca2 --- /dev/null +++ b/ui/public/xterm-themes/material_light.json @@ -0,0 +1,22 @@ +{ + "foreground": "#2f2f2f", + "background": "#eaeaea", + "cursor": "#16afca", + "selection": "#23232280", + "black": "#000000", + "red": "#b7141f", + "green": "#457b24", + "yellow": "#f6981e", + "blue": "#134eb2", + "magenta": "#560088", + "cyan": "#0e717c", + "white": "#f5f5f5", + "brightBlack": "#424242", + "brightRed": "#e83b3f", + "brightGreen": "#7aba3a", + "brightYellow": "#ffea2e", + 
"brightBlue": "#54a4f3", + "brightMagenta": "#aa4dbc", + "brightCyan": "#26bbd1", + "brightWhite": "#d9d9d9" +} \ No newline at end of file diff --git a/ui/public/xterm-themes/metadata.json b/ui/public/xterm-themes/metadata.json new file mode 100644 index 00000000000..449c3880e55 --- /dev/null +++ b/ui/public/xterm-themes/metadata.json @@ -0,0 +1,209 @@ +[ + { + "name": "ShellHub Dark", + "file": "shellhub_dark.json", + "dark": true, + "preview": { + "background": "#18191B", + "foreground": "#667ACC" + } + }, + { + "name": "ShellHub Light", + "file": "shellhub_light.json", + "dark": false, + "preview": { + "background": "#F5F5F5", + "foreground": "#667ACC" + } + }, + { + "name": "Terminal Dark", + "file": "terminal_dark.json", + "dark": true, + "preview": { + "background": "#141729", + "foreground": "#21b568" + } + }, + { + "name": "Terminal Light", + "file": "terminal_light.json", + "dark": false, + "preview": { + "background": "#d5dde0", + "foreground": "#32364a" + } + }, + { + "name": "Atom One Dark", + "file": "atom_one_dark.json", + "dark": true, + "preview": { + "background": "#1e2127", + "foreground": "#abb2bf" + } + }, + { + "name": "Atom One Light", + "file": "atom_one_light.json", + "dark": false, + "preview": { + "background": "#f9f9f9", + "foreground": "#383a42" + } + }, + { + "name": "Gruvbox Dark", + "file": "gruvbox_dark.json", + "dark": true, + "preview": { + "background": "#282828", + "foreground": "#ebdbb2" + } + }, + { + "name": "Gruvbox Light", + "file": "gruvbox_light.json", + "dark": false, + "preview": { + "background": "#fbf1c7", + "foreground": "#282828" + } + }, + { + "name": "Material Dark", + "file": "material_dark.json", + "dark": true, + "preview": { + "background": "#232322", + "foreground": "#e5e5e5" + } + }, + { + "name": "Material Light", + "file": "material_light.json", + "dark": false, + "preview": { + "background": "#eaeaea", + "foreground": "#2f2f2f" + } + }, + { + "name": "Basic", + "file": "basic.json", + "dark": false, + 
"preview": { + "background": "#ffffff", + "foreground": "#000000" + } + }, + { + "name": "Homebrew", + "file": "homebrew.json", + "dark": true, + "preview": { + "background": "#000000", + "foreground": "#00ff00" + } + }, + { + "name": "Grass", + "file": "grass.json", + "dark": true, + "preview": { + "background": "#13773d", + "foreground": "#fff0a5" + } + }, + { + "name": "Man Page", + "file": "man_page.json", + "dark": false, + "preview": { + "background": "#fef49c", + "foreground": "#000000" + } + }, + { + "name": "Ocean", + "file": "ocean.json", + "dark": true, + "preview": { + "background": "#224fbc", + "foreground": "#ffffff" + } + }, + { + "name": "Pro", + "file": "pro.json", + "dark": true, + "preview": { + "background": "#000000", + "foreground": "#f2f2f2" + } + }, + { + "name": "Red Sands", + "file": "red_sands.json", + "dark": true, + "preview": { + "background": "#7a251e", + "foreground": "#d7c9a7" + } + }, + { + "name": "Solarized Dark", + "file": "solarized_dark.json", + "dark": true, + "preview": { + "background": "#002b36", + "foreground": "#839496" + } + }, + { + "name": "Solarized Light", + "file": "solarized_light.json", + "dark": false, + "preview": { + "background": "#fdf6e3", + "foreground": "#657b83" + } + }, + { + "name": "Dracula", + "file": "dracula.json", + "dark": true, + "preview": { + "background": "#282a36", + "foreground": "#f8f8f2" + } + }, + { + "name": "Monokai", + "file": "monokai.json", + "dark": true, + "preview": { + "background": "#0c0c0c", + "foreground": "#d9d9d9" + } + }, + { + "name": "Nord Light", + "file": "nord_light.json", + "dark": false, + "preview": { + "background": "#e5e9f0", + "foreground": "#414858" + } + }, + { + "name": "Nord Dark", + "file": "nord_dark.json", + "dark": true, + "preview": { + "background": "#2e3440", + "foreground": "#d8dee9" + } + } +] diff --git a/ui/public/xterm-themes/monokai.json b/ui/public/xterm-themes/monokai.json new file mode 100644 index 00000000000..d05f0c21fd4 --- /dev/null +++ 
b/ui/public/xterm-themes/monokai.json @@ -0,0 +1,22 @@ +{ + "foreground": "#d9d9d9", + "background": "#0c0c0c", + "cursor": "#fc971f", + "selection": "#d9d9d980", + "black": "#1a1a1a", + "red": "#dd0056", + "green": "#92d526", + "yellow": "#fd971f", + "blue": "#874deb", + "magenta": "#ea095a", + "cyan": "#48bfd8", + "white": "#c4c5b5", + "brightBlack": "#625e4c", + "brightRed": "#ff3382", + "brightGreen": "#a6f12f", + "brightYellow": "#e0d561", + "brightBlue": "#9d65ff", + "brightMagenta": "#ff116d", + "brightCyan": "#58d1eb", + "brightWhite": "#f6f6ef" +} \ No newline at end of file diff --git a/ui/public/xterm-themes/nord_dark.json b/ui/public/xterm-themes/nord_dark.json new file mode 100644 index 00000000000..9ff6d009070 --- /dev/null +++ b/ui/public/xterm-themes/nord_dark.json @@ -0,0 +1,22 @@ +{ + "foreground": "#d8dee9", + "background": "#2e3440", + "cursor": "#eceff4", + "selection": "#eceff480", + "black": "#3b4252", + "red": "#ae545d", + "green": "#8ca377", + "yellow": "#dabe84", + "blue": "#718fae", + "magenta": "#95728e", + "cyan": "#78acbb", + "white": "#d8dee9", + "brightBlack": "#4c556a", + "brightRed": "#d97982", + "brightGreen": "#a3be8b", + "brightYellow": "#eacb8a", + "brightBlue": "#a4c7e9", + "brightMagenta": "#b48dac", + "brightCyan": "#8fbcbb", + "brightWhite": "#eceff4" +} \ No newline at end of file diff --git a/ui/public/xterm-themes/nord_light.json b/ui/public/xterm-themes/nord_light.json new file mode 100644 index 00000000000..70bf1b1c243 --- /dev/null +++ b/ui/public/xterm-themes/nord_light.json @@ -0,0 +1,22 @@ +{ + "foreground": "#414858", + "background": "#e5e9f0", + "cursor": "#88c0d0", + "selection": "#41485880", + "black": "#2c3344", + "red": "#ae545d", + "green": "#8ca377", + "yellow": "#dabe84", + "blue": "#718fae", + "magenta": "#95728e", + "cyan": "#78acbb", + "white": "#d8dee9", + "brightBlack": "#4c556a", + "brightRed": "#d97982", + "brightGreen": "#a3be8b", + "brightYellow": "#eacb8a", + "brightBlue": "#a4c7e9", + 
"brightMagenta": "#b48dac", + "brightCyan": "#8fbcbb", + "brightWhite": "#eceff4" +} \ No newline at end of file diff --git a/ui/public/xterm-themes/ocean.json b/ui/public/xterm-themes/ocean.json new file mode 100644 index 00000000000..9d361ee89a9 --- /dev/null +++ b/ui/public/xterm-themes/ocean.json @@ -0,0 +1,22 @@ +{ + "foreground": "#ffffff", + "background": "#224fbc", + "cursor": "#7f7f7f", + "selection": "#ffffff80", + "black": "#000000", + "red": "#881616", + "green": "#399518", + "yellow": "#dda114", + "blue": "#00a3ff", + "magenta": "#a83aff", + "cyan": "#28ccd6", + "white": "#d3d3d3", + "brightBlack": "#d2d2d2", + "brightRed": "#ff7658", + "brightGreen": "#00ff47", + "brightYellow": "#f5c147", + "brightBlue": "#79ceff", + "brightMagenta": "#ea6fff", + "brightCyan": "#58f4ff", + "brightWhite": "#ffffff" +} \ No newline at end of file diff --git a/ui/public/xterm-themes/pro.json b/ui/public/xterm-themes/pro.json new file mode 100644 index 00000000000..b3bf8b8756c --- /dev/null +++ b/ui/public/xterm-themes/pro.json @@ -0,0 +1,22 @@ +{ + "foreground": "#f2f2f2", + "background": "#000000", + "cursor": "#4d4d4d", + "selection": "#f2f2f280", + "black": "#2e2e2e", + "red": "#c93434", + "green": "#348e48", + "yellow": "#e09e00", + "blue": "#002bc7", + "magenta": "#e235ff", + "cyan": "#3fc1dd", + "white": "#d0cfcf", + "brightBlack": "#5b5b5b", + "brightRed": "#ff6767", + "brightGreen": "#31ff31", + "brightYellow": "#ffdca8", + "brightBlue": "#4465da", + "brightMagenta": "#ff5fc8", + "brightCyan": "#8debff", + "brightWhite": "#e6e6e6" +} \ No newline at end of file diff --git a/ui/public/xterm-themes/red_sands.json b/ui/public/xterm-themes/red_sands.json new file mode 100644 index 00000000000..26c4d2d88bd --- /dev/null +++ b/ui/public/xterm-themes/red_sands.json @@ -0,0 +1,22 @@ +{ + "foreground": "#d7c9a7", + "background": "#7a251e", + "cursor": "#ffffff", + "selection": "#d7c9a780", + "black": "#000000", + "red": "#d30e0e", + "green": "#58aa47", + "yellow": 
"#ffa673", + "blue": "#0072ff", + "magenta": "#ff57ee", + "cyan": "#3bcbea", + "white": "#e6e6e6", + "brightBlack": "#606060", + "brightRed": "#e0692f", + "brightGreen": "#b2ffa2", + "brightYellow": "#ffc27b", + "brightBlue": "#0193fc", + "brightMagenta": "#ffbce2", + "brightCyan": "#70e4ff", + "brightWhite": "#ffffff" +} \ No newline at end of file diff --git a/ui/public/xterm-themes/shellhub_dark.json b/ui/public/xterm-themes/shellhub_dark.json new file mode 100644 index 00000000000..1ab95d1ede8 --- /dev/null +++ b/ui/public/xterm-themes/shellhub_dark.json @@ -0,0 +1,22 @@ +{ + "foreground": "#667ACC", + "background": "#18191B", + "cursor": "#5c6370", + "selection": "#abb2bf80", + "black": "#000000", + "red": "#ca6169", + "green": "#82a568", + "yellow": "#bf8c5d", + "blue": "#56a2e1", + "magenta": "#b76ccd", + "cyan": "#4e9aa3", + "white": "#c5cbd6", + "brightBlack": "#5c6370", + "brightRed": "#e77c84", + "brightGreen": "#b4e294", + "brightYellow": "#e9b17b", + "brightBlue": "#7ec5ff", + "brightMagenta": "#db8df2", + "brightCyan": "#64cfdd", + "brightWhite": "#ffffff" +} \ No newline at end of file diff --git a/ui/public/xterm-themes/shellhub_light.json b/ui/public/xterm-themes/shellhub_light.json new file mode 100644 index 00000000000..d48eacee184 --- /dev/null +++ b/ui/public/xterm-themes/shellhub_light.json @@ -0,0 +1,22 @@ +{ + "foreground": "#667ACC", + "background": "#F5F5F5", + "cursor": "#383a42", + "selection": "#383a4280", + "black": "#000000", + "red": "#e45649", + "green": "#4c9b4b", + "yellow": "#c99525", + "blue": "#4078f2", + "magenta": "#a626a4", + "cyan": "#0184bc", + "white": "#b8b9bf", + "brightBlack": "#474747", + "brightRed": "#ff7468", + "brightGreen": "#74ca72", + "brightYellow": "#dba633", + "brightBlue": "#6a99ff", + "brightMagenta": "#c142bf", + "brightCyan": "#00b1fd", + "brightWhite": "#ffffff" +} \ No newline at end of file diff --git a/ui/public/xterm-themes/solarized_dark.json b/ui/public/xterm-themes/solarized_dark.json new file 
mode 100644 index 00000000000..5d10cfd34a9 --- /dev/null +++ b/ui/public/xterm-themes/solarized_dark.json @@ -0,0 +1,22 @@ +{ + "foreground": "#839496", + "background": "#002b36", + "cursor": "#657779", + "selection": "#65777980", + "black": "#11586a", + "red": "#dc322f", + "green": "#8ea20a", + "yellow": "#b58900", + "blue": "#268bd2", + "magenta": "#c41f6f", + "cyan": "#2aa198", + "white": "#e7e0cc", + "brightBlack": "#003b4a", + "brightRed": "#f15c59", + "brightGreen": "#677558", + "brightYellow": "#7e7a61", + "brightBlue": "#83a8ad", + "brightMagenta": "#886cc4", + "brightCyan": "#72b6b6", + "brightWhite": "#fdf6e3" +} \ No newline at end of file diff --git a/ui/public/xterm-themes/solarized_light.json b/ui/public/xterm-themes/solarized_light.json new file mode 100644 index 00000000000..4578dc40b12 --- /dev/null +++ b/ui/public/xterm-themes/solarized_light.json @@ -0,0 +1,22 @@ +{ + "foreground": "#657b83", + "background": "#fdf6e3", + "cursor": "#657b83", + "selection": "#657b8380", + "black": "#073642", + "red": "#dc322f", + "green": "#8fa30a", + "yellow": "#b58900", + "blue": "#268bd2", + "magenta": "#c41f6f", + "cyan": "#2aa198", + "white": "#e6e0cb", + "brightBlack": "#00252f", + "brightRed": "#e05319", + "brightGreen": "#667558", + "brightYellow": "#7e7960", + "brightBlue": "#83a8ad", + "brightMagenta": "#886cc4", + "brightCyan": "#72b6b6", + "brightWhite": "#fff0c7" +} \ No newline at end of file diff --git a/ui/public/xterm-themes/terminal_dark.json b/ui/public/xterm-themes/terminal_dark.json new file mode 100644 index 00000000000..f4db79938bf --- /dev/null +++ b/ui/public/xterm-themes/terminal_dark.json @@ -0,0 +1,22 @@ +{ + "foreground": "#21b568", + "background": "#141729", + "cursor": "#21b568", + "selection": "#21b56880", + "black": "#343851", + "red": "#f24e50", + "green": "#008463", + "yellow": "#eca855", + "blue": "#08639f", + "magenta": "#c13282", + "cyan": "#2091f6", + "white": "#e2e3e8", + "brightBlack": "#8d91a5", + "brightRed": "#ff7375", + 
"brightGreen": "#3ed7be", + "brightYellow": "#fdc47d", + "brightBlue": "#6ba0c3", + "brightMagenta": "#ff7dc5", + "brightCyan": "#44a7ff", + "brightWhite": "#ffffff" +} \ No newline at end of file diff --git a/ui/public/xterm-themes/terminal_light.json b/ui/public/xterm-themes/terminal_light.json new file mode 100644 index 00000000000..8c7a68b2cee --- /dev/null +++ b/ui/public/xterm-themes/terminal_light.json @@ -0,0 +1,22 @@ +{ + "foreground": "#32364a", + "background": "#d5dde0", + "cursor": "#32364a", + "selection": "#32364a80", + "black": "#141729", + "red": "#f24e50", + "green": "#198c51", + "yellow": "#f8aa4b", + "blue": "#004878", + "magenta": "#8f3c91", + "cyan": "#2091f6", + "white": "#eeeeee", + "brightBlack": "#3e4257", + "brightRed": "#ff7375", + "brightGreen": "#21b568", + "brightYellow": "#fdc47d", + "brightBlue": "#1d6da2", + "brightMagenta": "#ff7dc5", + "brightCyan": "#44a7ff", + "brightWhite": "#ffffff" +} \ No newline at end of file diff --git a/ui/scripts/entrypoint.sh b/ui/scripts/entrypoint.sh index d193c521581..53459399422 100755 --- a/ui/scripts/entrypoint.sh +++ b/ui/scripts/entrypoint.sh @@ -4,4 +4,4 @@ SCRIPTS_DIR=$(dirname $(readlink -f $0)) $SCRIPTS_DIR/env.sh SHELLHUB > /usr/share/nginx/html/env.js -nginx-debug -g "daemon off;" +exec nginx-debug -g "daemon off;" diff --git a/ui/src/App.vue b/ui/src/App.vue index 63a1e00ae4d..2b1953d04a5 100755 --- a/ui/src/App.vue +++ b/ui/src/App.vue @@ -1,5 +1,5 @@ - diff --git a/ui/src/components/Announcements/AnnouncementsModal.vue b/ui/src/components/Announcements/AnnouncementsModal.vue index b2d48ee3e28..8db6ab64f08 100644 --- a/ui/src/components/Announcements/AnnouncementsModal.vue +++ b/ui/src/components/Announcements/AnnouncementsModal.vue @@ -1,93 +1,63 @@ + - diff --git a/ui/src/components/AuthMFA/RecoveryHelper.vue b/ui/src/components/AuthMFA/RecoveryHelper.vue new file mode 100644 index 00000000000..6b1a4541f8e --- /dev/null +++ b/ui/src/components/AuthMFA/RecoveryHelper.vue @@ -0,0 +1,135 
@@ + + + diff --git a/ui/src/components/Billing/BillingCheckout.vue b/ui/src/components/Billing/BillingCheckout.vue index bec1f07fdbb..acf1f73dd92 100644 --- a/ui/src/components/Billing/BillingCheckout.vue +++ b/ui/src/components/Billing/BillingCheckout.vue @@ -2,37 +2,52 @@ -

Payment Method:

+

+ Payment Method: +

-

This is the payment method you have selected for your ShellHub Cloud subscription

+

+ This is the payment method you have selected for your ShellHub Cloud subscription +

- + - + - + {{ paymentMethod.number }} - + {{ paymentMethod.exp_month + "/" + paymentMethod.exp_year }} - + {{ paymentMethod.cvc }} - + default This payment method will be used on your subscription + > + This payment method will be used on your subscription + @@ -42,13 +57,18 @@ -

Additional Information:

+

+ Additional Information: +

-
    +
    • Your selected payment method will be charged automatically on a monthly basis

    • -
    • You can manage your payment methods, invoices and subscription details in the Billing Portal.

    • +
    • You can manage your payment methods, invoices and subscription details in the Billing Portal.

    • Invoices will be generated and available for download at the beginning of each billing cycle.

    • -
    • You have the option to cancel your subscription at any time through the Billing Portal.

    • +
    • You have the option to cancel your subscription at any time through the Billing Portal.

    @@ -59,21 +79,20 @@ import { onMounted, reactive, computed } from "vue"; import { IPaymentMethod } from "@/interfaces/ICustomer"; import BillingIcon from "@/components/Billing/BillingIcon.vue"; -import { store } from "@/store"; +import useCustomerStore from "@/store/modules/customer"; -const filter: IPaymentMethod = { brand: "", cvc: "", default: false, exp_year: 0, exp_month: 0, id: "", number: "" }; -const paymentMethod = reactive(filter); -const consumerData = computed(() => store.getters["customer/getCustomer"]); +const customerStore = useCustomerStore(); +const paymentMethod = reactive({ brand: "", cvc: "", default: false, exp_year: 0, exp_month: 0, id: "", number: "" }); +const customer = computed(() => customerStore.customer); onMounted(async () => { - await store.dispatch("customer/fetchCustomer"); - const customerDetails = consumerData.value.data; - const pm = customerDetails.payment_methods?.filter((value: IPaymentMethod) => value.default === true)[0]; - paymentMethod.brand = pm?.brand || ""; - paymentMethod.cvc = pm?.cvc || ""; - paymentMethod.exp_year = pm?.exp_year || 0; - paymentMethod.exp_month = pm?.exp_month || 0; - paymentMethod.number = pm?.number || ""; - paymentMethod.default = pm?.default || false; + await customerStore.fetchCustomer(); + const defaultPaymentMethod = customer.value.payment_methods?.filter((value: IPaymentMethod) => value.default === true)[0]; + paymentMethod.brand = defaultPaymentMethod?.brand || ""; + paymentMethod.cvc = defaultPaymentMethod?.cvc || ""; + paymentMethod.exp_year = defaultPaymentMethod?.exp_year || 0; + paymentMethod.exp_month = defaultPaymentMethod?.exp_month || 0; + paymentMethod.number = defaultPaymentMethod?.number || ""; + paymentMethod.default = defaultPaymentMethod?.default || false; }); diff --git a/ui/src/components/Billing/BillingDialog.vue b/ui/src/components/Billing/BillingDialog.vue new file mode 100644 index 00000000000..799939dbe85 --- /dev/null +++ 
b/ui/src/components/Billing/BillingDialog.vue @@ -0,0 +1,183 @@ + + + diff --git a/ui/src/components/Billing/BillingIcon.vue b/ui/src/components/Billing/BillingIcon.vue index 8e55d222395..10a0c9d7ed6 100644 --- a/ui/src/components/Billing/BillingIcon.vue +++ b/ui/src/components/Billing/BillingIcon.vue @@ -1,60 +1,31 @@ - diff --git a/ui/src/components/Billing/BillingLetter.vue b/ui/src/components/Billing/BillingLetter.vue index 9fb57b7c4c0..16209f386f4 100644 --- a/ui/src/components/Billing/BillingLetter.vue +++ b/ui/src/components/Billing/BillingLetter.vue @@ -1,15 +1,20 @@ diff --git a/ui/src/components/Billing/BillingPayment.vue b/ui/src/components/Billing/BillingPayment.vue index 89de11c1c09..77949fbf44a 100644 --- a/ui/src/components/Billing/BillingPayment.vue +++ b/ui/src/components/Billing/BillingPayment.vue @@ -5,29 +5,50 @@ v-model="customer.name" label="Name" disabled - data-test="customer-name" /> + data-test="customer-name" + /> + data-test="customer-email" + /> -

    Your credit cards

    +

    + Your credit cards +

    - - - - + class="w-100 pa-0 pt-2 content-card" + > + + + + @@ -40,17 +61,31 @@ {{ item.cvc }} - + default This payment method will be used on your subscription + > + This payment method will be used on your subscription + - - + + @@ -60,63 +95,99 @@ You don't have any registered cards yet, please add one - - + + + :elements-options="elementsOptions" + > + :options="cardOptions" + /> - + + data-test="add-card-btn" + @click="addNewCard ? savePayment() : addNewCard = true" + /> - + diff --git a/ui/src/components/Box/BoxMessage.vue b/ui/src/components/Box/BoxMessage.vue deleted file mode 100644 index a00370606df..00000000000 --- a/ui/src/components/Box/BoxMessage.vue +++ /dev/null @@ -1,236 +0,0 @@ - - - - - diff --git a/ui/src/components/Card/Card.vue b/ui/src/components/Card/Card.vue deleted file mode 100644 index aeeea4d397a..00000000000 --- a/ui/src/components/Card/Card.vue +++ /dev/null @@ -1,97 +0,0 @@ - - - - - diff --git a/ui/src/components/Containers/Container.vue b/ui/src/components/Containers/Container.vue new file mode 100644 index 00000000000..8c7b23f616b --- /dev/null +++ b/ui/src/components/Containers/Container.vue @@ -0,0 +1,79 @@ + + + diff --git a/ui/src/components/Containers/ContainerAdd.vue b/ui/src/components/Containers/ContainerAdd.vue new file mode 100644 index 00000000000..81f5a1f4237 --- /dev/null +++ b/ui/src/components/Containers/ContainerAdd.vue @@ -0,0 +1,69 @@ + + + diff --git a/ui/src/components/Containers/ContainerList.vue b/ui/src/components/Containers/ContainerList.vue new file mode 100644 index 00000000000..ba0b49485e9 --- /dev/null +++ b/ui/src/components/Containers/ContainerList.vue @@ -0,0 +1,16 @@ + + + diff --git a/ui/src/components/Containers/ContainerPendingList.vue b/ui/src/components/Containers/ContainerPendingList.vue new file mode 100644 index 00000000000..b49095d890b --- /dev/null +++ b/ui/src/components/Containers/ContainerPendingList.vue @@ -0,0 +1,16 @@ + + + diff --git 
a/ui/src/components/Containers/ContainerRejectedList.vue b/ui/src/components/Containers/ContainerRejectedList.vue new file mode 100644 index 00000000000..4a0e28be6d6 --- /dev/null +++ b/ui/src/components/Containers/ContainerRejectedList.vue @@ -0,0 +1,16 @@ + + + diff --git a/ui/src/components/CopyCommandField.vue b/ui/src/components/CopyCommandField.vue new file mode 100644 index 00000000000..d703a1e5289 --- /dev/null +++ b/ui/src/components/CopyCommandField.vue @@ -0,0 +1,57 @@ + + + + + diff --git a/ui/src/components/DataTable.vue b/ui/src/components/DataTable.vue deleted file mode 100644 index c3e8b2ffb75..00000000000 --- a/ui/src/components/DataTable.vue +++ /dev/null @@ -1,140 +0,0 @@ - - - - - diff --git a/ui/src/components/Devices/Device.vue b/ui/src/components/Devices/Device.vue index 530c2afffb6..53833dc438d 100644 --- a/ui/src/components/Devices/Device.vue +++ b/ui/src/components/Devices/Device.vue @@ -1,30 +1,78 @@ - diff --git a/ui/src/components/Devices/DeviceAcceptWarning.vue b/ui/src/components/Devices/DeviceAcceptWarning.vue index 3d84e0856ed..45789c097ae 100644 --- a/ui/src/components/Devices/DeviceAcceptWarning.vue +++ b/ui/src/components/Devices/DeviceAcceptWarning.vue @@ -1,69 +1,31 @@ diff --git a/ui/src/components/Devices/DeviceActionButton.vue b/ui/src/components/Devices/DeviceActionButton.vue index 8b7d4014aed..3052ffa3431 100644 --- a/ui/src/components/Devices/DeviceActionButton.vue +++ b/ui/src/components/Devices/DeviceActionButton.vue @@ -1,265 +1,195 @@