diff --git a/.copier/.copier-answers.yml.jinja b/.copier/.copier-answers.yml.jinja
new file mode 100644
index 0000000000..0028a2398a
--- /dev/null
+++ b/.copier/.copier-answers.yml.jinja
@@ -0,0 +1 @@
+{{ _copier_answers|to_json -}}
diff --git a/.copier/update_dotenv.py b/.copier/update_dotenv.py
new file mode 100644
index 0000000000..6576885626
--- /dev/null
+++ b/.copier/update_dotenv.py
@@ -0,0 +1,26 @@
+from pathlib import Path
+import json
+
+# Update the .env file with the answers from the .copier-answers.yml file,
+# without using Jinja2 templates in the .env file. This way the code works as is
+# without needing Copier, but if Copier is used, the .env file will be updated.
+root_path = Path(__file__).parent.parent
+answers_path = Path(__file__).parent / ".copier-answers.yml"
+answers = json.loads(answers_path.read_text())
+env_path = root_path / ".env"
+env_content = env_path.read_text()
+lines = []
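+# Rewrite each KEY=value line whose KEY (uppercased) matches a Copier answer;
+# values containing spaces are quoted via !r below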
+for line in env_content.splitlines():
+ for key, value in answers.items():
+ upper_key = key.upper()
+ if line.startswith(f"{upper_key}="):
+ if " " in value:
+ content = f"{upper_key}={value!r}"
+ else:
+ content = f"{upper_key}={value}"
+            # Replace the whole line with the new KEY=value content
+            lines.append(content)
+ break
+ else:
+ lines.append(line)
+env_path.write_text("\n".join(lines))
diff --git a/.env b/.env
new file mode 100644
index 0000000000..1d44286e25
--- /dev/null
+++ b/.env
@@ -0,0 +1,45 @@
+# Domain
+# This would be set to the production domain with an env var on deployment,
+# used by Traefik to route traffic and acquire TLS certificates
+DOMAIN=localhost
+# To test the local Traefik config
+# DOMAIN=localhost.tiangolo.com
+
+# Used by the backend to generate links in emails to the frontend
+FRONTEND_HOST=http://localhost:5173
+# In staging and production, set this env var to the frontend host, e.g.
+# FRONTEND_HOST=https://dashboard.example.com
+
+# Environment: local, staging, production
+ENVIRONMENT=local
+
+PROJECT_NAME="Full Stack FastAPI Project"
+STACK_NAME=full-stack-fastapi-project
+
+# Backend
+BACKEND_CORS_ORIGINS="http://localhost,http://localhost:5173,https://localhost,https://localhost:5173,http://localhost.tiangolo.com"
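+# Generate secure values for this and the other "changethis" secrets, e.g. with:
+# python -c "import secrets; print(secrets.token_urlsafe(32))"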
+SECRET_KEY=changethis
+FIRST_SUPERUSER=admin@example.com
+FIRST_SUPERUSER_PASSWORD=changethis
+
+# Emails
+SMTP_HOST=
+SMTP_USER=
+SMTP_PASSWORD=
+EMAILS_FROM_EMAIL=info@example.com
+SMTP_TLS=True
+SMTP_SSL=False
+SMTP_PORT=587
+
+# Postgres
+POSTGRES_SERVER=localhost
+POSTGRES_PORT=5432
+POSTGRES_DB=app
+POSTGRES_USER=postgres
+POSTGRES_PASSWORD=changethis
+
+SENTRY_DSN=
+
+# Configure these with your own Docker registry images
+DOCKER_IMAGE_BACKEND=backend
+DOCKER_IMAGE_FRONTEND=frontend
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000000..efdba87644
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,2 @@
+* text=auto
+*.sh text eol=lf
diff --git a/.github/DISCUSSION_TEMPLATE/questions.yml b/.github/DISCUSSION_TEMPLATE/questions.yml
new file mode 100644
index 0000000000..af6abf4661
--- /dev/null
+++ b/.github/DISCUSSION_TEMPLATE/questions.yml
@@ -0,0 +1,118 @@
+labels: [question]
+body:
+ - type: markdown
+ attributes:
+ value: |
+        Thanks for your interest in this project! 🚀
+
+        Please follow these instructions, fill every question, and do every step. 🙏
+
+        I'm asking this because answering questions and solving problems in GitHub is what consumes most of my time.
+
+        I end up not being able to add new features, fix bugs, review pull requests, etc. as fast as I wish because I have to spend too much time handling questions.
+
+        All that, on top of all the incredible help provided by a bunch of community members who give a lot of their time to come here and help others.
+
+        That's a lot of work, but if more users came to help others like them just a little bit more, it would be much less effort for them (and you and me 🙂).
+
+        By asking questions in a structured way (following this), it will be much easier to help you.
+
+        And there's a high chance that you will find the solution along the way and you won't even have to submit the question and wait for an answer. 😎
+
+        As there are too many questions, I'll have to discard and close the incomplete ones. That will allow me (and others) to focus on helping people like you who follow the whole process and help us help you. 🤓
+ - type: checkboxes
+ id: checks
+ attributes:
+ label: First Check
+ description: Please confirm and check all the following options.
+ options:
+ - label: I added a very descriptive title here.
+ required: true
+ - label: I used the GitHub search to find a similar question and didn't find it.
+ required: true
+ - label: I searched in the documentation/README.
+ required: true
+ - label: I already searched in Google "How to do X" and didn't find any information.
+ required: true
+        - label: I already read and followed the whole tutorial in the docs/README and didn't find an answer.
+ required: true
+ - type: checkboxes
+ id: help
+ attributes:
+ label: Commit to Help
+ description: |
+ After submitting this, I commit to one of:
+
+ * Read open questions until I find 2 where I can help someone and add a comment to help there.
+ * I already hit the "watch" button in this repository to receive notifications and I commit to help at least 2 people that ask questions in the future.
+
+ options:
+ - label: I commit to help with one of those options ๐
+ required: true
+ - type: textarea
+ id: example
+ attributes:
+ label: Example Code
+ description: |
+ Please add a self-contained, [minimal, reproducible, example](https://stackoverflow.com/help/minimal-reproducible-example) with your use case.
+
+ If I (or someone) can copy it, run it, and see it right away, there's a much higher chance I (or someone) will be able to help you.
+
+ placeholder: |
+ Write your example code here.
+ render: Text
+ validations:
+ required: true
+ - type: textarea
+ id: description
+ attributes:
+ label: Description
+ description: |
+ What is the problem, question, or error?
+
+ Write a short description telling me what you are doing, what you expect to happen, and what is currently happening.
+ placeholder: |
+ * Open the browser and call the endpoint `/`.
+ * It returns a JSON with `{"message": "Hello World"}`.
+ * But I expected it to return `{"message": "Hello Morty"}`.
+ validations:
+ required: true
+ - type: dropdown
+ id: os
+ attributes:
+ label: Operating System
+ description: What operating system are you on?
+ multiple: true
+ options:
+ - Linux
+ - Windows
+ - macOS
+ - Other
+ validations:
+ required: true
+ - type: textarea
+ id: os-details
+ attributes:
+ label: Operating System Details
+ description: You can add more details about your operating system here, in particular if you chose "Other".
+ validations:
+ required: true
+ - type: input
+ id: python-version
+ attributes:
+ label: Python Version
+ description: |
+ What Python version are you using?
+
+ You can find the Python version with:
+
+ ```bash
+ python --version
+ ```
+ validations:
+ required: true
+ - type: textarea
+ id: context
+ attributes:
+ label: Additional Context
+ description: Add any additional context information or screenshots you think are useful.
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
new file mode 100644
index 0000000000..50bde36072
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -0,0 +1,10 @@
+blank_issues_enabled: false
+contact_links:
+ - name: Security Contact
+ about: Please report security vulnerabilities to security@tiangolo.com
+ - name: Question or Problem
+ about: Ask a question or ask about a problem in GitHub Discussions.
+ url: https://github.com/fastapi/full-stack-fastapi-template/discussions/categories/questions
+ - name: Feature Request
+ about: To suggest an idea or ask about a feature, please start with a question saying what you would like to achieve. There might be a way to do it already.
+ url: https://github.com/fastapi/full-stack-fastapi-template/discussions/categories/questions
diff --git a/.github/ISSUE_TEMPLATE/privileged.yml b/.github/ISSUE_TEMPLATE/privileged.yml
new file mode 100644
index 0000000000..6438848c83
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/privileged.yml
@@ -0,0 +1,22 @@
+name: Privileged
+description: You are @tiangolo or he asked you directly to create an issue here. If not, check the other options. 🙇
+body:
+ - type: markdown
+ attributes:
+ value: |
+        Thanks for your interest in this project! 🚀
+
+ If you are not @tiangolo or he didn't ask you directly to create an issue here, please start the conversation in a [Question in GitHub Discussions](https://github.com/tiangolo/full-stack-fastapi-template/discussions/categories/questions) instead.
+ - type: checkboxes
+ id: privileged
+ attributes:
+ label: Privileged issue
+ description: Confirm that you are allowed to create an issue here.
+ options:
+ - label: I'm @tiangolo or he asked me directly to create an issue here.
+ required: true
+ - type: textarea
+ id: content
+ attributes:
+ label: Issue Content
+ description: Add the content of the issue here.
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000000..e83b24786d
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,46 @@
+version: 2
+updates:
+ # GitHub Actions
+ - package-ecosystem: github-actions
+ directory: /
+ schedule:
+ interval: daily
+ commit-message:
+      prefix: ⬆
+ labels: [dependencies, internal]
+ # Python uv
+ - package-ecosystem: uv
+ directory: /
+ schedule:
+ interval: weekly
+ commit-message:
+      prefix: ⬆
+ labels: [dependencies, internal]
+ # bun
+ - package-ecosystem: bun
+ directory: /
+ schedule:
+ interval: weekly
+ commit-message:
+      prefix: ⬆
+ labels: [dependencies, internal]
+ ignore:
+ - dependency-name: "@hey-api/openapi-ts"
+ # Docker
+ - package-ecosystem: docker
+ directories:
+ - /backend
+ - /frontend
+ schedule:
+ interval: weekly
+ commit-message:
+      prefix: ⬆
+ labels: [dependencies, internal]
+ # Docker Compose
+ - package-ecosystem: docker-compose
+ directory: /
+ schedule:
+ interval: weekly
+ commit-message:
+      prefix: ⬆
+ labels: [dependencies, internal]
diff --git a/.github/labeler.yml b/.github/labeler.yml
new file mode 100644
index 0000000000..ed657c23d7
--- /dev/null
+++ b/.github/labeler.yml
@@ -0,0 +1,25 @@
+docs:
+ - all:
+ - changed-files:
+ - any-glob-to-any-file:
+ - '**/*.md'
+ - all-globs-to-all-files:
+ - '!frontend/**'
+ - '!backend/**'
+ - '!.github/**'
+ - '!scripts/**'
+ - '!.gitignore'
+ - '!.pre-commit-config.yaml'
+
+internal:
+ - all:
+ - changed-files:
+ - any-glob-to-any-file:
+ - .github/**
+ - scripts/**
+ - .gitignore
+ - .pre-commit-config.yaml
+ - all-globs-to-all-files:
+ - '!./**/*.md'
+ - '!frontend/**'
+ - '!backend/**'
diff --git a/.github/workflows/add-to-project.yml b/.github/workflows/add-to-project.yml
new file mode 100644
index 0000000000..dccea83f35
--- /dev/null
+++ b/.github/workflows/add-to-project.yml
@@ -0,0 +1,18 @@
+name: Add to Project
+
+on:
+ pull_request_target:
+ issues:
+ types:
+ - opened
+ - reopened
+
+jobs:
+ add-to-project:
+ name: Add to project
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/add-to-project@v1.0.2
+ with:
+ project-url: https://github.com/orgs/fastapi/projects/2
+ github-token: ${{ secrets.PROJECTS_TOKEN }}
diff --git a/.github/workflows/deploy-production.yml b/.github/workflows/deploy-production.yml
new file mode 100644
index 0000000000..fd1190070e
--- /dev/null
+++ b/.github/workflows/deploy-production.yml
@@ -0,0 +1,32 @@
+name: Deploy to Production
+
+on:
+ release:
+ types:
+ - published
+
+jobs:
+ deploy:
+ # Do not deploy in the main repository, only in user projects
+ if: github.repository_owner != 'fastapi'
+ runs-on:
+ - self-hosted
+ - production
+ env:
+ ENVIRONMENT: production
+ DOMAIN: ${{ secrets.DOMAIN_PRODUCTION }}
+ STACK_NAME: ${{ secrets.STACK_NAME_PRODUCTION }}
+ SECRET_KEY: ${{ secrets.SECRET_KEY }}
+ FIRST_SUPERUSER: ${{ secrets.FIRST_SUPERUSER }}
+ FIRST_SUPERUSER_PASSWORD: ${{ secrets.FIRST_SUPERUSER_PASSWORD }}
+ SMTP_HOST: ${{ secrets.SMTP_HOST }}
+ SMTP_USER: ${{ secrets.SMTP_USER }}
+ SMTP_PASSWORD: ${{ secrets.SMTP_PASSWORD }}
+ EMAILS_FROM_EMAIL: ${{ secrets.EMAILS_FROM_EMAIL }}
+ POSTGRES_PASSWORD: ${{ secrets.POSTGRES_PASSWORD }}
+ SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v6
+ - run: docker compose -f compose.yml --project-name ${{ secrets.STACK_NAME_PRODUCTION }} build
+ - run: docker compose -f compose.yml --project-name ${{ secrets.STACK_NAME_PRODUCTION }} up -d
diff --git a/.github/workflows/deploy-staging.yml b/.github/workflows/deploy-staging.yml
new file mode 100644
index 0000000000..7968f950e7
--- /dev/null
+++ b/.github/workflows/deploy-staging.yml
@@ -0,0 +1,32 @@
+name: Deploy to Staging
+
+on:
+ push:
+ branches:
+ - master
+
+jobs:
+ deploy:
+ # Do not deploy in the main repository, only in user projects
+ if: github.repository_owner != 'fastapi'
+ runs-on:
+ - self-hosted
+ - staging
+ env:
+ ENVIRONMENT: staging
+ DOMAIN: ${{ secrets.DOMAIN_STAGING }}
+ STACK_NAME: ${{ secrets.STACK_NAME_STAGING }}
+ SECRET_KEY: ${{ secrets.SECRET_KEY }}
+ FIRST_SUPERUSER: ${{ secrets.FIRST_SUPERUSER }}
+ FIRST_SUPERUSER_PASSWORD: ${{ secrets.FIRST_SUPERUSER_PASSWORD }}
+ SMTP_HOST: ${{ secrets.SMTP_HOST }}
+ SMTP_USER: ${{ secrets.SMTP_USER }}
+ SMTP_PASSWORD: ${{ secrets.SMTP_PASSWORD }}
+ EMAILS_FROM_EMAIL: ${{ secrets.EMAILS_FROM_EMAIL }}
+ POSTGRES_PASSWORD: ${{ secrets.POSTGRES_PASSWORD }}
+ SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v6
+ - run: docker compose -f compose.yml --project-name ${{ secrets.STACK_NAME_STAGING }} build
+ - run: docker compose -f compose.yml --project-name ${{ secrets.STACK_NAME_STAGING }} up -d
diff --git a/.github/workflows/detect-conflicts.yml b/.github/workflows/detect-conflicts.yml
new file mode 100644
index 0000000000..aba329db85
--- /dev/null
+++ b/.github/workflows/detect-conflicts.yml
@@ -0,0 +1,19 @@
+name: "Conflict detector"
+on:
+ push:
+ pull_request_target:
+ types: [synchronize]
+
+jobs:
+ main:
+ permissions:
+ contents: read
+ pull-requests: write
+ runs-on: ubuntu-latest
+ steps:
+ - name: Check if PRs have merge conflicts
+ uses: eps1lon/actions-label-merge-conflict@v3
+ with:
+ dirtyLabel: "conflicts"
+ repoToken: "${{ secrets.GITHUB_TOKEN }}"
+ commentOnDirty: "This pull request has a merge conflict that needs to be resolved."
diff --git a/.github/workflows/issue-manager.yml b/.github/workflows/issue-manager.yml
index 42748fbb2b..425c926634 100644
--- a/.github/workflows/issue-manager.yml
+++ b/.github/workflows/issue-manager.yml
@@ -2,27 +2,54 @@ name: Issue Manager
on:
schedule:
- - cron: "0 0 * * *"
+ - cron: "21 17 * * *"
issue_comment:
types:
- created
- - edited
issues:
types:
- labeled
+ pull_request_target:
+ types:
+ - labeled
+ workflow_dispatch:
+
+permissions:
+ issues: write
+ pull-requests: write
jobs:
issue-manager:
+ if: github.repository_owner == 'fastapi'
runs-on: ubuntu-latest
steps:
- - uses: tiangolo/issue-manager@0.2.0
+ - name: Dump GitHub context
+ env:
+ GITHUB_CONTEXT: ${{ toJson(github) }}
+ run: echo "$GITHUB_CONTEXT"
+ - uses: tiangolo/issue-manager@0.6.0
with:
token: ${{ secrets.GITHUB_TOKEN }}
config: >
{
"answered": {
- "users": ["tiangolo"],
"delay": 864000,
- "message": "Assuming the original issue was solved, it will be automatically closed now. But feel free to add more comments or create new issues."
+ "message": "Assuming the original need was handled, this will be automatically closed now. But feel free to add more comments or create new issues or PRs."
+ },
+ "waiting": {
+ "delay": 2628000,
+ "message": "As this PR has been waiting for the original user for a while but seems to be inactive, it's now going to be closed. But if there's anyone interested, feel free to create a new PR.",
+ "reminder": {
+ "before": "P3D",
+ "message": "Heads-up: this will be closed in 3 days unless there's new activity."
+ }
+ },
+ "invalid": {
+ "delay": 0,
+ "message": "This was marked as invalid and will be closed now. If this is an error, please provide additional details."
+ },
+ "maybe-ai": {
+ "delay": 0,
+ "message": "This was marked as potentially AI generated and will be closed now. If this is an error, please provide additional details, make sure to read the docs about contributing and AI."
}
}
diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml
new file mode 100644
index 0000000000..7aeb448e6f
--- /dev/null
+++ b/.github/workflows/labeler.yml
@@ -0,0 +1,33 @@
+name: Labels
+on:
+ pull_request_target:
+ types:
+ - opened
+ - synchronize
+ - reopened
+ # For label-checker
+ - labeled
+ - unlabeled
+
+jobs:
+ labeler:
+ permissions:
+ contents: read
+ pull-requests: write
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/labeler@v6
+ if: ${{ github.event.action != 'labeled' && github.event.action != 'unlabeled' }}
+ - run: echo "Done adding labels"
+ # Run this after labeler applied labels
+ check-labels:
+ needs:
+ - labeler
+ permissions:
+ pull-requests: read
+ runs-on: ubuntu-latest
+ steps:
+ - uses: docker://agilepathway/pull-request-label-checker:latest
+ with:
+ one_of: breaking,security,feature,bug,refactor,upgrade,docs,lang-all,internal
+ repo_token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/latest-changes.yml b/.github/workflows/latest-changes.yml
new file mode 100644
index 0000000000..1f6cde6deb
--- /dev/null
+++ b/.github/workflows/latest-changes.yml
@@ -0,0 +1,40 @@
+name: Latest Changes
+
+on:
+ pull_request_target:
+ branches:
+ - master
+ types:
+ - closed
+ workflow_dispatch:
+ inputs:
+ number:
+ description: PR number
+ required: true
+ debug_enabled:
+ description: "Run the build with tmate debugging enabled (https://github.com/marketplace/actions/debugging-with-tmate)"
+ required: false
+ default: "false"
+
+jobs:
+ latest-changes:
+ runs-on: ubuntu-latest
+ permissions:
+ pull-requests: read
+ steps:
+ - name: Dump GitHub context
+ env:
+ GITHUB_CONTEXT: ${{ toJson(github) }}
+ run: echo "$GITHUB_CONTEXT"
+ - uses: actions/checkout@v6
+ with:
+ # To allow latest-changes to commit to the main branch
+ token: ${{ secrets.LATEST_CHANGES }}
+ - uses: tiangolo/latest-changes@0.4.1
+ with:
+ token: ${{ secrets.GITHUB_TOKEN }}
+ latest_changes_file: ./release-notes.md
+ latest_changes_header: "## Latest Changes"
+ end_regex: "^## "
+ debug_logs: true
+ label_header_prefix: "### "
diff --git a/.github/workflows/playwright.yml b/.github/workflows/playwright.yml
new file mode 100644
index 0000000000..3f9e0a2112
--- /dev/null
+++ b/.github/workflows/playwright.yml
@@ -0,0 +1,121 @@
+name: Playwright Tests
+
+on:
+ push:
+ branches:
+ - master
+ pull_request:
+ types:
+ - opened
+ - synchronize
+ workflow_dispatch:
+ inputs:
+ debug_enabled:
+ description: 'Run the build with tmate debugging enabled (https://github.com/marketplace/actions/debugging-with-tmate)'
+ required: false
+ default: 'false'
+
+jobs:
+ changes:
+ runs-on: ubuntu-latest
+ # Set job outputs to values from filter step
+ outputs:
+ changed: ${{ steps.filter.outputs.changed }}
+ steps:
+ - uses: actions/checkout@v6
+ # For pull requests it's not necessary to checkout the code but for the main branch it is
+ - uses: dorny/paths-filter@v3
+ id: filter
+ with:
+ filters: |
+ changed:
+ - backend/**
+ - frontend/**
+ - .env
+ - compose*.yml
+ - .github/workflows/playwright.yml
+
+ test-playwright:
+ needs:
+ - changes
+ if: ${{ needs.changes.outputs.changed == 'true' }}
+ timeout-minutes: 60
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ shardIndex: [1, 2, 3, 4]
+ shardTotal: [4]
+ fail-fast: false
+ steps:
+ - uses: actions/checkout@v6
+ - uses: oven-sh/setup-bun@v2
+ - uses: actions/setup-python@v6
+ with:
+ python-version: '3.10'
+ - name: Setup tmate session
+ uses: mxschmitt/action-tmate@v3
+ if: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.debug_enabled == 'true' }}
+ with:
+ limit-access-to-actor: true
+ - name: Install uv
+ uses: astral-sh/setup-uv@v7
+ - run: uv sync
+ working-directory: backend
+ - run: bun ci
+ working-directory: frontend
+ - run: bash scripts/generate-client.sh
+ - run: docker compose build
+ - run: docker compose down -v --remove-orphans
+ - name: Run Playwright tests
+ run: docker compose run --rm playwright bunx playwright test --fail-on-flaky-tests --trace=retain-on-failure --shard=${{ matrix.shardIndex }}/${{ matrix.shardTotal }}
+ - run: docker compose down -v --remove-orphans
+ - name: Upload blob report to GitHub Actions Artifacts
+ if: ${{ !cancelled() }}
+ uses: actions/upload-artifact@v6
+ with:
+ name: blob-report-${{ matrix.shardIndex }}
+ path: frontend/blob-report
+ include-hidden-files: true
+ retention-days: 1
+
+ merge-playwright-reports:
+ needs:
+ - test-playwright
+ - changes
+ # Merge reports after playwright-tests, even if some shards have failed
+ if: ${{ !cancelled() && needs.changes.outputs.changed == 'true' }}
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v6
+ - uses: oven-sh/setup-bun@v2
+ - name: Install dependencies
+ run: bun ci
+ - name: Download blob reports from GitHub Actions Artifacts
+ uses: actions/download-artifact@v7
+ with:
+ path: frontend/all-blob-reports
+ pattern: blob-report-*
+ merge-multiple: true
+ - name: Merge into HTML Report
+ run: bunx playwright merge-reports --reporter html ./all-blob-reports
+ working-directory: frontend
+ - name: Upload HTML report
+ uses: actions/upload-artifact@v6
+ with:
+ name: html-report--attempt-${{ github.run_attempt }}
+ path: frontend/playwright-report
+ retention-days: 30
+ include-hidden-files: true
+
+ # https://github.com/marketplace/actions/alls-green#why
+ alls-green-playwright: # This job does nothing and is only used for the branch protection
+ if: always()
+ needs:
+ - test-playwright
+ runs-on: ubuntu-latest
+ steps:
+ - name: Decide whether the needed jobs succeeded or failed
+ uses: re-actors/alls-green@release/v1
+ with:
+ jobs: ${{ toJSON(needs) }}
+ allowed-skips: test-playwright
diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml
new file mode 100644
index 0000000000..b609751643
--- /dev/null
+++ b/.github/workflows/pre-commit.yml
@@ -0,0 +1,94 @@
+name: pre-commit
+
+on:
+ pull_request:
+ types:
+ - opened
+ - synchronize
+
+env:
+ # Forks and Dependabot don't have access to secrets
+ HAS_SECRETS: ${{ secrets.PRE_COMMIT != '' }}
+
+jobs:
+ pre-commit:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Dump GitHub context
+ env:
+ GITHUB_CONTEXT: ${{ toJson(github) }}
+ run: echo "$GITHUB_CONTEXT"
+ - uses: actions/checkout@v6
+ name: Checkout PR for own repo
+ if: env.HAS_SECRETS == 'true'
+ with:
+ # To be able to commit it needs to fetch the head of the branch, not the
+ # merge commit
+ ref: ${{ github.head_ref }}
+ # And it needs the full history to be able to compute diffs
+ fetch-depth: 0
+ # A token other than the default GITHUB_TOKEN is needed to be able to trigger CI
+ token: ${{ secrets.PRE_COMMIT }}
+ # pre-commit lite ci needs the default checkout configs to work
+ - uses: actions/checkout@v6
+ name: Checkout PR for fork
+ if: env.HAS_SECRETS == 'false'
+ with:
+ # To be able to commit it needs the head branch of the PR, the remote one
+ ref: ${{ github.event.pull_request.head.sha }}
+ fetch-depth: 0
+ - uses: oven-sh/setup-bun@v2
+ - name: Set up Python
+ uses: actions/setup-python@v6
+ with:
+ python-version: "3.11"
+ - name: Setup uv
+ uses: astral-sh/setup-uv@v7
+ with:
+ cache-dependency-glob: |
+ requirements**.txt
+ pyproject.toml
+ uv.lock
+ - name: Install backend dependencies
+ run: uv sync --all-packages
+ - name: Install frontend dependencies
+ run: bun ci
+ - name: Run prek - pre-commit
+ id: precommit
+ run: uvx prek run --from-ref origin/${GITHUB_BASE_REF} --to-ref HEAD --show-diff-on-failure
+ continue-on-error: true
+ - name: Commit and push changes
+ if: env.HAS_SECRETS == 'true'
+ run: |
+ git config user.name "github-actions[bot]"
+ git config user.email "github-actions[bot]@users.noreply.github.com"
+ git add -A
+ if git diff --staged --quiet; then
+ echo "No changes to commit"
+ else
+            git commit -m "🎨 Auto format and update with pre-commit"
+ git push
+ fi
+ - uses: pre-commit-ci/lite-action@v1.1.0
+ if: env.HAS_SECRETS == 'false'
+ with:
+          msg: 🎨 Auto format and update with pre-commit
+ - name: Error out on pre-commit errors
+ if: steps.precommit.outcome == 'failure'
+ run: exit 1
+
+ # https://github.com/marketplace/actions/alls-green#why
+ pre-commit-alls-green: # This job does nothing and is only used for the branch protection
+ if: always()
+ needs:
+ - pre-commit
+ runs-on: ubuntu-latest
+ steps:
+ - name: Dump GitHub context
+ env:
+ GITHUB_CONTEXT: ${{ toJson(github) }}
+ run: echo "$GITHUB_CONTEXT"
+ - name: Decide whether the needed jobs succeeded or failed
+ uses: re-actors/alls-green@release/v1
+ with:
+ jobs: ${{ toJSON(needs) }}
diff --git a/.github/workflows/smokeshow.yml b/.github/workflows/smokeshow.yml
new file mode 100644
index 0000000000..a49c90d63a
--- /dev/null
+++ b/.github/workflows/smokeshow.yml
@@ -0,0 +1,34 @@
+name: Smokeshow
+
+on:
+ workflow_run:
+ workflows: [Test Backend]
+ types: [completed]
+
+jobs:
+ smokeshow:
+ runs-on: ubuntu-latest
+ permissions:
+ actions: read
+ statuses: write
+
+ steps:
+ - uses: actions/checkout@v6
+ - uses: actions/setup-python@v6
+ with:
+ python-version: "3.13"
+ - run: pip install smokeshow
+ - uses: actions/download-artifact@v7
+ with:
+ name: coverage-html
+ path: backend/htmlcov
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ run-id: ${{ github.event.workflow_run.id }}
+ - run: smokeshow upload backend/htmlcov
+ env:
+ SMOKESHOW_GITHUB_STATUS_DESCRIPTION: Coverage {coverage-percentage}
+ SMOKESHOW_GITHUB_COVERAGE_THRESHOLD: 90
+ SMOKESHOW_GITHUB_CONTEXT: coverage
+ SMOKESHOW_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ SMOKESHOW_GITHUB_PR_HEAD_SHA: ${{ github.event.workflow_run.head_sha }}
+ SMOKESHOW_AUTH_KEY: ${{ secrets.SMOKESHOW_AUTH_KEY }}
diff --git a/.github/workflows/test-backend.yml b/.github/workflows/test-backend.yml
new file mode 100644
index 0000000000..c103ae4392
--- /dev/null
+++ b/.github/workflows/test-backend.yml
@@ -0,0 +1,41 @@
+name: Test Backend
+
+on:
+ push:
+ branches:
+ - master
+ pull_request:
+ types:
+ - opened
+ - synchronize
+
+jobs:
+ test-backend:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v6
+ - name: Set up Python
+ uses: actions/setup-python@v6
+ with:
+ python-version: "3.10"
+ - name: Install uv
+ uses: astral-sh/setup-uv@v7
+ - run: docker compose down -v --remove-orphans
+ - run: docker compose up -d db mailcatcher
+ - name: Migrate DB
+ run: uv run bash scripts/prestart.sh
+ working-directory: backend
+ - name: Run tests
+ run: uv run bash scripts/tests-start.sh "Coverage for ${{ github.sha }}"
+ working-directory: backend
+ - run: docker compose down -v --remove-orphans
+ - name: Store coverage files
+ uses: actions/upload-artifact@v6
+ with:
+ name: coverage-html
+ path: backend/htmlcov
+ include-hidden-files: true
+ - name: Coverage report
+ run: uv run coverage report --fail-under=90
+ working-directory: backend
diff --git a/.github/workflows/test-docker-compose.yml b/.github/workflows/test-docker-compose.yml
new file mode 100644
index 0000000000..8054e5eafd
--- /dev/null
+++ b/.github/workflows/test-docker-compose.yml
@@ -0,0 +1,26 @@
+name: Test Docker Compose
+
+on:
+ push:
+ branches:
+ - master
+ pull_request:
+ types:
+ - opened
+ - synchronize
+
+jobs:
+
+ test-docker-compose:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v6
+ - run: docker compose build
+ - run: docker compose down -v --remove-orphans
+ - run: docker compose up -d --wait backend frontend adminer
+ - name: Test backend is up
+ run: curl http://localhost:8000/api/v1/utils/health-check
+ - name: Test frontend is up
+ run: curl http://localhost:5173
+ - run: docker compose down -v --remove-orphans
diff --git a/.gitignore b/.gitignore
index 5d778b370c..f903ab6066 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,7 @@
-.vscode
-testing-project
-.mypy_cache
-poetry.lock
-dev-link/
+.vscode/*
+!.vscode/extensions.json
+node_modules/
+/test-results/
+/playwright-report/
+/blob-report/
+/playwright/.cache/
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000000..fc718ea90d
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,48 @@
+# See https://pre-commit.com for more information
+# See https://pre-commit.com/hooks.html for more hooks
+repos:
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.4.0
+ hooks:
+ - id: check-added-large-files
+ - id: check-toml
+ - id: check-yaml
+ args:
+ - --unsafe
+ - id: end-of-file-fixer
+ exclude: |
+ (?x)^(
+ frontend/src/client/.*|
+ backend/app/email-templates/build/.*
+ )$
+ - id: trailing-whitespace
+ exclude: ^frontend/src/client/.*
+ - repo: local
+ hooks:
+ - id: local-biome-check
+ name: biome check
+ entry: npm run lint
+ language: system
+ types: [text]
+ files: ^frontend/
+
+ - id: local-ruff-check
+ name: ruff check
+ entry: uv run ruff check --force-exclude --fix --exit-non-zero-on-fix
+ require_serial: true
+ language: unsupported
+ types: [python]
+
+ - id: local-ruff-format
+ name: ruff format
+ entry: uv run ruff format --force-exclude --exit-non-zero-on-format
+ require_serial: true
+ language: unsupported
+ types: [python]
+
+ - id: generate-frontend-sdk
+ name: Generate Frontend SDK
+ entry: bash ./scripts/generate-client.sh
+ pass_filenames: false
+ language: unsupported
+ files: ^backend/.*$|^scripts/generate-client\.sh$
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index ad7e0349a3..0000000000
--- a/.travis.yml
+++ /dev/null
@@ -1,12 +0,0 @@
-sudo: required
-
-language: python
-
-install:
- - pip install cookiecutter
-
-services:
- - docker
-
-script:
-- bash ./scripts/test.sh
diff --git a/.vscode/extensions.json b/.vscode/extensions.json
new file mode 100644
index 0000000000..34097e1ef3
--- /dev/null
+++ b/.vscode/extensions.json
@@ -0,0 +1,14 @@
+{
+ "recommendations": [
+ "astral-sh.ty",
+ "biomejs.biome",
+ "bradlc.vscode-tailwindcss",
+ "charliermarsh.ruff",
+ "docker.docker",
+ "github.vscode-github-actions",
+ "mjmlio.vscode-mjml",
+ "ms-playwright.playwright",
+ "ms-python.python",
+ "tombi-toml.tombi"
+ ]
+}
diff --git a/.vscode/launch.json b/.vscode/launch.json
new file mode 100644
index 0000000000..24eae850d0
--- /dev/null
+++ b/.vscode/launch.json
@@ -0,0 +1,28 @@
+{
+ // Use IntelliSense to learn about possible attributes.
+ // Hover to view descriptions of existing attributes.
+ // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
+ "version": "0.2.0",
+ "configurations": [
+ {
+ "name": "Debug FastAPI Project backend: Python Debugger",
+ "type": "debugpy",
+ "request": "launch",
+ "module": "uvicorn",
+ "args": [
+ "app.main:app",
+ "--reload"
+ ],
+ "cwd": "${workspaceFolder}/backend",
+ "jinja": true,
+ "envFile": "${workspaceFolder}/.env",
+ },
+ {
+ "type": "chrome",
+ "request": "launch",
+ "name": "Debug Frontend: Launch Chrome against http://localhost:5173",
+ "url": "http://localhost:5173",
+ "webRoot": "${workspaceFolder}/frontend"
+ },
+ ]
+}
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index d95d76171c..7e725a3fe5 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,83 +1,65 @@
# Contributing
-Here are some short guidelines to guide you if you want to contribute to the development of the Full Stack FastAPI PostgreSQL project generator itself.
+Thank you for your interest in contributing to the Full Stack FastAPI Template! 🚀
-After you clone the project, there are several scripts that can help during development.
+## Discussions First
-* `./scripts/dev-fsfp.sh`:
+For **big changes** (new features, architectural changes, significant refactoring), please start by opening a [GitHub Discussion](https://github.com/fastapi/full-stack-fastapi-template/discussions) first. This allows the community and maintainers to provide feedback on the approach before you invest significant time in implementation.
-Generate a new default project `dev-fsfp`.
+For small, straightforward changes, you can go directly to a Pull Request without starting a discussion first. This includes:
-Call it from one level above the project directory. So, if the project is at `~/code/full-stack-fastapi-postgresql/`, call it from `~/code/`, like:
+- Typos and grammatical fixes
+- Small reproducible bug fixes
+- Fixing lint warnings or type errors
+- Minor code improvements (e.g., removing unused code)
-```console
-$ cd ~/code/
+## Developing
-$ bash ./full-stack-fastapi-postgresql/scripts/dev-fsfp.sh
-```
+For detailed instructions on setting up your development environment, running the stack, linting, pre-commit hooks, and more, see the [Development Guide](development.md).
-It will generate a new project with all the defaults at `~/code/dev-fsfp/`.
+## Pull Requests
-You can go to that directory with a full new project, edit files and test things, for example:
+When submitting a pull request:
-```console
-$ cd ./dev-fsfp/
+1. Make sure all tests pass before submitting.
+2. Keep PRs focused on a single change.
+3. Update tests if you're changing functionality.
+4. Reference any related issues in your PR description.
-$ docker-compose up -d
-```
+## Automated Code and AI
-It is outside of the project generator directory to let you add Git to it and compare versions and changes.
+You are encouraged to use all the tools you want to do your work and contribute as efficiently as possible; this includes AI (LLM) tools, etc. Nevertheless, contributions should have meaningful human intervention, judgement, context, etc.
-* `./scripts/dev-fsfp-back.sh`:
+If the **human effort** put into a PR, e.g. writing LLM prompts, is **less** than the **effort we would need to put in** to **review it**, please **don't** submit the PR.
-Move the changes from a project `dev-fsfp` back to the project generator.
+Think of it this way: we can already write LLM prompts or run automated tools ourselves, and that would be faster than reviewing external PRs.
-You would call it after calling `./scripts/dev-fsfp.sh` and adding some modifications to `dev-fsfp`.
+### Closing Automated and AI PRs
-Call it from one level above the project directory. So, if the project is at `~/code/full-stack-fastapi-postgresql/`, call it from `~/code/`, like:
+If we see PRs that seem AI generated or automated in similar ways, we'll flag them and close them.
-```console
-$ cd ~/code/
+The same applies to comments and descriptions: please don't copy-paste content generated by an LLM.
-$ bash ./full-stack-fastapi-postgresql/scripts/dev-fsfp-back.sh
-```
+### Human Effort Denial of Service
-That will also contain all the generated files with the generated variables, but it will let you compare the changes in `dev-fsfp` and the source in the project generator with git, and see what to commit.
+Using automated tools and AI to submit PRs or comments that we have to carefully review and handle would be the equivalent of a [Denial-of-service attack](https://en.wikipedia.org/wiki/Denial-of-service_attack) on our human effort.
-* `./scripts/discard-dev-files.sh`:
+It would take very little effort from the person submitting the PR (an LLM prompt) while generating a large amount of effort on our side (carefully reviewing the code).
-After using `./scripts/dev-fsfp-back.sh`, there will be a bunch of generated files with the variables for the generated project that you don't want to commit, like `README.md` and `.gitlab-ci.yml`.
+Please don't do that.
-To discard all those changes at once, run `discard-dev-files.sh` from the root of the project, e.g.:
+We'll need to block accounts that spam us with repeated automated PRs or comments.
-```console
-$ cd ~/code/full-stack-fastapi-postgresql/
+### Use Tools Wisely
-$ bash ./scripts/dev-fsfp-back.sh
-```
+As Uncle Ben said:
-* `./scripts/test.sh`:
+> With great ~~power~~ **tools** comes great responsibility.
-Run the tests. It creates a project `testing-project` *inside* of the project generator and runs its tests.
+Avoid inadvertently doing harm.
-Call it from the root of the project, e.g.:
+You have amazing tools at hand; use them wisely to help effectively.
-```console
-$ cd ~/code/full-stack-fastapi-postgresql/
+## Questions?
-$ bash ./scripts/test.sh
-```
-
-* `./scripts/dev-link.sh`:
-
-Set up a local directory with links to the files for live development with the source files.
-
-This script generates a project `dev-link` *inside* the project generator, just to generate the `.env` and `./frontend/.env` files.
-
-Then it removes everything except those 2 files.
-
-Then it creates links for each of the source files, and adds those 2 files back.
-
-The end result is that you can go into the `dev-link` directory and develop locally with it as if it was a generated project, with all the variables set. But all the changes are actually done directly in the source files.
-
-This is probably a lot faster to iterate than using `./scripts/dev-fsfp.sh`. But it's tested only in Linux, it might not work in other systems.
+If you have questions about contributing, feel free to open a [GitHub Discussion](https://github.com/fastapi/full-stack-fastapi-template/discussions).
diff --git a/README.md b/README.md
index d85e943834..a9049b4779 100644
--- a/README.md
+++ b/README.md
@@ -1,270 +1,233 @@
-# Full Stack FastAPI and PostgreSQL - Base Project Generator
+# Full Stack FastAPI Template
+
+
+
+
+
+## Technology Stack and Features
+
+- ⚡ [**FastAPI**](https://fastapi.tiangolo.com) for the Python backend API.
+    - 🧰 [SQLModel](https://sqlmodel.tiangolo.com) for the Python SQL database interactions (ORM).
+    - 🔍 [Pydantic](https://docs.pydantic.dev), used by FastAPI, for the data validation and settings management.
+    - 💾 [PostgreSQL](https://www.postgresql.org) as the SQL database.
+- 🚀 [React](https://react.dev) for the frontend.
+    - 💃 Using TypeScript, hooks, [Vite](https://vitejs.dev), and other parts of a modern frontend stack.
+    - 🎨 [Tailwind CSS](https://tailwindcss.com) and [shadcn/ui](https://ui.shadcn.com) for the frontend components.
+    - 🤖 An automatically generated frontend client.
+    - 🧪 [Playwright](https://playwright.dev) for End-to-End testing.
+    - 🦇 Dark mode support.
+- 🐋 [Docker Compose](https://www.docker.com) for development and production.
+- 🔒 Secure password hashing by default.
+- 🔑 JWT (JSON Web Token) authentication.
+- 📫 Email based password recovery.
+- 📬 [Mailcatcher](https://mailcatcher.me) for local email testing during development.
+- ✅ Tests with [Pytest](https://pytest.org).
+- 📞 [Traefik](https://traefik.io) as a reverse proxy / load balancer.
+- 🚢 Deployment instructions using Docker Compose, including how to set up a frontend Traefik proxy to handle automatic HTTPS certificates.
+- 🏭 CI (continuous integration) and CD (continuous deployment) based on GitHub Actions.
-[](https://travis-ci.com/tiangolo/full-stack-fastapi-postgresql)
+### Dashboard Login
-Generate a backend and frontend stack using Python, including interactive API documentation.
+[](https://github.com/fastapi/full-stack-fastapi-template)
-### Interactive API documentation
+### Dashboard - Admin
-[](https://github.com/tiangolo/full-stack-fastapi-postgresql)
+[](https://github.com/fastapi/full-stack-fastapi-template)
-### Alternative API documentation
+### Dashboard - Items
-[](https://github.com/tiangolo/full-stack-fastapi-postgresql)
+[](https://github.com/fastapi/full-stack-fastapi-template)
-### Dashboard Login
+### Dashboard - Dark Mode
+
+[](https://github.com/fastapi/full-stack-fastapi-template)
+
+### Interactive API Documentation
+
+[](https://github.com/fastapi/full-stack-fastapi-template)
+
+## How To Use It
-[](https://github.com/tiangolo/full-stack-fastapi-postgresql)
-
-### Dashboard - Create User
-
-[](https://github.com/tiangolo/full-stack-fastapi-postgresql)
-
-## Features
-
-* Full **Docker** integration (Docker based).
-* Docker Swarm Mode deployment.
-* **Docker Compose** integration and optimization for local development.
-* **Production ready** Python web server using Uvicorn and Gunicorn.
-* Python **FastAPI** backend:
- * **Fast**: Very high performance, on par with **NodeJS** and **Go** (thanks to Starlette and Pydantic).
- * **Intuitive**: Great editor support. Completion everywhere. Less time debugging.
- * **Easy**: Designed to be easy to use and learn. Less time reading docs.
- * **Short**: Minimize code duplication. Multiple features from each parameter declaration.
- * **Robust**: Get production-ready code. With automatic interactive documentation.
- * **Standards-based**: Based on (and fully compatible with) the open standards for APIs: OpenAPI and JSON Schema.
- * **Many other features** including automatic validation, serialization, interactive documentation, authentication with OAuth2 JWT tokens, etc.
-* **Secure password** hashing by default.
-* **JWT token** authentication.
-* **SQLAlchemy** models (independent of Flask extensions, so they can be used with Celery workers directly).
-* Basic starting models for users (modify and remove as you need).
-* **Alembic** migrations.
-* **CORS** (Cross Origin Resource Sharing).
-* **Celery** worker that can import and use models and code from the rest of the backend selectively.
-* REST backend tests based on **Pytest**, integrated with Docker, so you can test the full API interaction, independent on the database. As it runs in Docker, it can build a new data store from scratch each time (so you can use ElasticSearch, MongoDB, CouchDB, or whatever you want, and just test that the API works).
-* Easy Python integration with **Jupyter Kernels** for remote or in-Docker development with extensions like Atom Hydrogen or Visual Studio Code Jupyter.
-* **Vue** frontend:
- * Generated with Vue CLI.
- * **JWT Authentication** handling.
- * Login view.
- * After login, main dashboard view.
- * Main dashboard with user creation and edition.
- * Self user edition.
- * **Vuex**.
- * **Vue-router**.
- * **Vuetify** for beautiful material design components.
- * **TypeScript**.
- * Docker server based on **Nginx** (configured to play nicely with Vue-router).
- * Docker multi-stage building, so you don't need to save or commit compiled code.
- * Frontend tests ran at build time (can be disabled too).
- * Made as modular as possible, so it works out of the box, but you can re-generate with Vue CLI or create it as you need, and re-use what you want.
- * It's also easy to remove it if you have an API-only app, check the instructions in the generated `README.md`.
-* **PGAdmin** for PostgreSQL database, you can modify it to use PHPMyAdmin and MySQL easily.
-* **Flower** for Celery jobs monitoring.
-* Load balancing between frontend and backend with **Traefik**, so you can have both under the same domain, separated by path, but served by different containers.
-* Traefik integration, including Let's Encrypt **HTTPS** certificates automatic generation.
-* GitLab **CI** (continuous integration), including frontend and backend testing.
-
-## How to use it
-
-Go to the directory where you want to create your project and run:
+You can **just fork or clone** this repository and use it as is.
+
+✨ It just works. ✨
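+
+For example, a minimal way to try it locally (assuming Docker and Docker Compose are installed):
+
+```bash
+git clone https://github.com/fastapi/full-stack-fastapi-template.git my-project
+cd my-project
+docker compose up -d
+```
+
+The frontend should then be available at http://localhost:5173 and the backend API at http://localhost:8000.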
+
+### How to Use a Private Repository
+
+If you want to have a private repository, GitHub won't allow you to simply fork it as it doesn't allow changing the visibility of forks.
+
+But you can do the following:
+
+- Create a new GitHub repo, for example `my-full-stack`.
+- Clone this repository manually, setting the directory name to the name of the project you want to use, for example `my-full-stack`:
```bash
-pip install cookiecutter
-cookiecutter https://github.com/tiangolo/full-stack-fastapi-postgresql
+git clone git@github.com:fastapi/full-stack-fastapi-template.git my-full-stack
```
-### Generate passwords
+- Enter into the new directory:
-You will be asked to provide passwords and secret keys for several components. Open another terminal and run:
+```bash
+cd my-full-stack
+```
+
+- Set the new origin to your new repository, copying the URL from the GitHub interface, for example:
```bash
-openssl rand -hex 32
-# Outputs something like: 99d3b1f01aa639e4a76f4fc281fc834747a543720ba4c8a8648ba755aef9be7f
+git remote set-url origin git@github.com:octocat/my-full-stack.git
```
-Copy the contents and use that as password / secret key. And run that again to generate another secure key.
+- Add this repo as another "remote" to allow you to get updates later:
+```bash
+git remote add upstream git@github.com:fastapi/full-stack-fastapi-template.git
+```
-### Input variables
+- Push the code to your new repository:
-The generator (cookiecutter) will ask you for some data, you might want to have at hand before generating the project.
+```bash
+git push -u origin master
+```
-The input variables, with their default values (some auto generated) are:
+### Update From the Original Template
-* `project_name`: The name of the project
-* `project_slug`: The development friendly name of the project. By default, based on the project name
-* `domain_main`: The domain in where to deploy the project for production (from the branch `production`), used by the load balancer, backend, etc. By default, based on the project slug.
-* `domain_staging`: The domain in where to deploy while staging (before production) (from the branch `master`). By default, based on the main domain.
+After cloning the repository, and after making changes, you might want to get the latest changes from this original template.
-* `docker_swarm_stack_name_main`: The name of the stack while deploying to Docker in Swarm mode for production. By default, based on the domain.
-* `docker_swarm_stack_name_staging`: The name of the stack while deploying to Docker in Swarm mode for staging. By default, based on the domain.
+- Make sure you added the original repository as a remote; you can check it with:
-* `secret_key`: Backend server secret key. Use the method above to generate it.
-* `first_superuser`: The first superuser generated, with it you will be able to create more users, etc. By default, based on the domain.
-* `first_superuser_password`: First superuser password. Use the method above to generate it.
-* `backend_cors_origins`: Origins (domains, more or less) that are enabled for CORS (Cross Origin Resource Sharing). This allows a frontend in one domain (e.g. `https://dashboard.example.com`) to communicate with this backend, that could be living in another domain (e.g. `https://api.example.com`). It can also be used to allow your local frontend (with a custom `hosts` domain mapping, as described in the project's `README.md`) that could be living in `http://dev.example.com:8080` to communicate with the backend at `https://stag.example.com`. Notice the `http` vs `https` and the `dev.` prefix for local development vs the "staging" `stag.` prefix. By default, it includes origins for production, staging and development, with ports commonly used during local development by several popular frontend frameworks (Vue with `:8080`, React, Angular).
-* `smtp_port`: Port to use to send emails via SMTP. By default `587`.
-* `smtp_host`: Host to use to send emails, it would be given by your email provider, like Mailgun, Sparkpost, etc.
-* `smtp_user`: The user to use in the SMTP connection. The value will be given by your email provider.
-* `smtp_password`: The password to be used in the SMTP connection. The value will be given by the email provider.
-* `smtp_emails_from_email`: The email account to use as the sender in the notification emails, it would be something like `info@your-custom-domain.com`.
-
-* `postgres_password`: Postgres database password. Use the method above to generate it. (You could easily modify it to use MySQL, MariaDB, etc).
-* `pgadmin_default_user`: PGAdmin default user, to log-in to the PGAdmin interface.
-* `pgadmin_default_user_password`: PGAdmin default user password. Generate it with the method above.
-
-* `traefik_constraint_tag`: The tag to be used by the internal Traefik load balancer (for example, to divide requests between backend and frontend) for production. Used to separate this stack from any other stack you might have. This should identify each stack in each environment (production, staging, etc).
-* `traefik_constraint_tag_staging`: The Traefik tag to be used while on staging.
-* `traefik_public_constraint_tag`: The tag that should be used by stack services that should communicate with the public.
+```bash
+git remote -v
-* `flower_auth`: Basic HTTP authentication for flower, in the form`user:password`. By default: "`admin:changethis`".
+origin git@github.com:octocat/my-full-stack.git (fetch)
+origin git@github.com:octocat/my-full-stack.git (push)
+upstream git@github.com:fastapi/full-stack-fastapi-template.git (fetch)
+upstream git@github.com:fastapi/full-stack-fastapi-template.git (push)
+```
-* `sentry_dsn`: Key URL (DSN) of Sentry, for live error reporting. You can use the open source version or a free account. E.g.: `https://1234abcd:5678ef@sentry.example.com/30`.
+- Pull the latest changes without merging:
-* `docker_image_prefix`: Prefix to use for Docker image names. If you are using GitLab Docker registry it would be based on your code repository. E.g.: `git.example.com/development-team/my-awesome-project/`.
-* `docker_image_backend`: Docker image name for the backend. By default, it will be based on your Docker image prefix, e.g.: `git.example.com/development-team/my-awesome-project/backend`. And depending on your environment, a different tag will be appended ( `prod`, `stag`, `branch` ). So, the final image names used will be like: `git.example.com/development-team/my-awesome-project/backend:prod`.
-* `docker_image_celeryworker`: Docker image for the celery worker. By default, based on your Docker image prefix.
-* `docker_image_frontend`: Docker image for the frontend. By default, based on your Docker image prefix.
+```bash
+git pull --no-commit upstream master
+```
-## How to deploy
+This will download the latest changes from this template without committing them, so that you can check everything is right before committing.
-This stack can be adjusted and used with several deployment options that are compatible with Docker Compose, but it is designed to be used in a cluster controlled with pure Docker in Swarm Mode with a Traefik main load balancer proxy handling automatic HTTPS certificates, using the ideas from DockerSwarm.rocks.
+- If there are conflicts, solve them in your editor.
-Please refer to DockerSwarm.rocks to see how to deploy such a cluster in 20 minutes.
+- Once you are done, commit the changes:
-## More details
+```bash
+git merge --continue
+```
-After using this generator, your new project (the directory created) will contain an extensive `README.md` with instructions for development, deployment, etc. You can pre-read [the project `README.md` template here too](./{{cookiecutter.project_slug}}/README.md).
+### Configure
-## Sibling project generators
+You can then update the configs in the `.env` files to customize your configuration.
-* Full Stack FastAPI Couchbase: [https://github.com/tiangolo/full-stack-fastapi-couchbase](https://github.com/tiangolo/full-stack-fastapi-couchbase).
+Before deploying it, make sure you change at least the values for:
-## Release Notes
+- `SECRET_KEY`
+- `FIRST_SUPERUSER_PASSWORD`
+- `POSTGRES_PASSWORD`
+
+You can (and should) pass these as environment variables from secrets.
+
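+For example, when deploying by hand (outside of CI) you could export them in the shell before running Docker Compose, similar to what the GitHub Actions deploy workflows in this template do; the values below are placeholders:
+
+```bash
+export SECRET_KEY="<generated-secret-key>"
+export FIRST_SUPERUSER_PASSWORD="<generated-password>"
+export POSTGRES_PASSWORD="<generated-password>"
+docker compose -f compose.yml up -d
+```
+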
+Read the [deployment.md](./deployment.md) docs for more details.
+
+### Generate Secret Keys
+
+Some environment variables in the `.env` file have a default value of `changethis`.
+
+You have to change them to a secret key. To generate secret keys, you can run the following command:
+
+```bash
+python -c "import secrets; print(secrets.token_urlsafe(32))"
+```
-### Latest Changes
+Copy the content and use it as the password / secret key. Run the command again to generate another secure key.
-* Update issue-manager. PR [#211](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/211).
-* Add [GitHub Sponsors](https://github.com/sponsors/tiangolo) button. PR [#201](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/201).
-* Add consistent errors for env vars not set. PR [#200](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/200).
-* Upgrade Traefik to version 2, keeping in sync with DockerSwarm.rocks. PR [#199](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/199).
-* Add docs about reporting test coverage in HTML. PR [#161](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/161).
-* Run tests with `TestClient`. PR [#160](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/160).
-* Refactor backend:
- * Simplify configs for tools and format to better support editor integration.
- * Add mypy configurations and plugins.
- * Add types to all the codebase.
- * Update types for SQLAlchemy models with plugin.
- * Update and refactor CRUD utils.
- * Refactor DB sessions to use dependencies with `yield`.
- * Refactor dependencies, security, CRUD, models, schemas, etc. To simplify code and improve autocompletion.
- * Change from PyJWT to Python-JOSE as it supports additional use cases.
- * Fix JWT tokens using user email/ID as the subject in `sub`.
- * PR [#158](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/158).
-* Add docs about removing the frontend, for an API-only app. PR [#156](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/156).
-* Simplify scripts and development, update docs and configs. PR [#155](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/155).
-* Simplify `docker-compose.*.yml` files, refactor deployment to reduce config files. PR [#153](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/153).
-* Simplify env var files, merge to a single `.env` file. PR [#151](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/151).
+## How To Use It - Alternative With Copier
-### 0.5.0
+This repository also supports generating a new project using [Copier](https://copier.readthedocs.io).
-* Make the Traefik public network a fixed default of `traefik-public` as done in DockerSwarm.rocks, to simplify development and iteration of the project generator. PR [#150](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/150).
-* Update to PostgreSQL 12. PR [#148](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/148). by [@RCheese](https://github.com/RCheese).
-* Use Poetry for package management. Initial PR [#144](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/144) by [@RCheese](https://github.com/RCheese).
-* Fix Windows line endings for shell scripts after project generation with Cookiecutter hooks. PR [#149](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/149).
-* Upgrade Vue CLI to version 4. PR [#120](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/120) by [@br3ndonland](https://github.com/br3ndonland).
-* Remove duplicate `login` tag. PR [#135](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/135) by [@Nonameentered](https://github.com/Nonameentered).
-* Fix showing email in dashboard when there's no user's full name. PR [#129](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/129) by [@rlonka](https://github.com/rlonka).
-* Format code with Black and Flake8. PR [#121](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/121) by [@br3ndonland](https://github.com/br3ndonland).
-* Simplify SQLAlchemy Base class. PR [#117](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/117) by [@airibarne](https://github.com/airibarne).
-* Update CRUD utils for users, handling password hashing. PR [#106](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/106) by [@mocsar](https://github.com/mocsar).
-* Use `.` instead of `source` for interoperability. PR [#98](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/98) by [@gucharbon](https://github.com/gucharbon).
-* Use Pydantic's `BaseSettings` for settings/configs and env vars. PR [#87](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/87) by [@StephenBrown2](https://github.com/StephenBrown2).
-* Remove `package-lock.json` to let everyone lock their own versions (depending on OS, etc).
-* Simplify Traefik service labels PR [#139](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/139).
-* Add email validation. PR [#40](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/40) by [@kedod](https://github.com/kedod).
-* Fix typo in README. PR [#83](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/83) by [@ashears](https://github.com/ashears).
-* Fix typo in README. PR [#80](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/80) by [@abjoker](https://github.com/abjoker).
-* Fix function name `read_item` and response code. PR [#74](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/74) by [@jcaguirre89](https://github.com/jcaguirre89).
-* Fix typo in comment. PR [#70](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/70) by [@daniel-butler](https://github.com/daniel-butler).
-* Fix Flower Docker configuration. PR [#37](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/37) by [@dmontagu](https://github.com/dmontagu).
-* Add new CRUD utils based on DB and Pydantic models. Initial PR [#23](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/23) by [@ebreton](https://github.com/ebreton).
-* Add normal user testing Pytest fixture. PR [#20](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/20) by [@ebreton](https://github.com/ebreton).
+It will copy all the files, ask you configuration questions, and update the `.env` files with your answers.
-### 0.4.0
+### Install Copier
-* Fix security on resetting a password. Receive token as body, not query. PR [#34](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/34).
+You can install Copier with:
-* Fix security on resetting a password. Receive it as body, not query. PR [#33](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/33) by [@dmontagu](https://github.com/dmontagu).
+```bash
+pip install copier
+```
-* Fix SQLAlchemy class lookup on initialization. PR [#29](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/29) by [@ebreton](https://github.com/ebreton).
+Or better, if you have [`pipx`](https://pipx.pypa.io/), you can run it with:
-* Fix SQLAlchemy operation errors on database restart. PR [#32](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/32) by [@ebreton](https://github.com/ebreton).
+```bash
+pipx install copier
+```
-* Fix locations of scripts in generated README. PR [#19](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/19) by [@ebreton](https://github.com/ebreton).
+**Note**: If you have `pipx`, installing Copier is optional; you can run it directly.
-* Forward arguments from script to `pytest` inside container. PR [#17](https://github.com/tiangolo/full-stack-fastapi-postgresql/pull/17) by [@ebreton](https://github.com/ebreton).
+### Generate a Project With Copier
-* Update development scripts.
+Decide on a name for your new project's directory; you will use it below. For example, `my-awesome-project`.
-* Read Alembic configs from env vars. PR #9 by @ebreton.
+Go to the directory that will be the parent of your project, and run the command with your project's name:
-* Create DB Item objects from all Pydantic model's fields.
+```bash
+copier copy https://github.com/fastapi/full-stack-fastapi-template my-awesome-project --trust
+```
-* Update Jupyter Lab installation and util script/environment variable for local development.
+If you have `pipx` and you didn't install `copier`, you can run it directly:
-### 0.3.0
+```bash
+pipx run copier copy https://github.com/fastapi/full-stack-fastapi-template my-awesome-project --trust
+```
-* PR #14:
- * Update CRUD utils to use types better.
- * Simplify Pydantic model names, from `UserInCreate` to `UserCreate`, etc.
- * Upgrade packages.
- * Add new generic "Items" models, crud utils, endpoints, and tests. To facilitate re-using them to create new functionality. As they are simple and generic (not like Users), it's easier to copy-paste and adapt them to each use case.
- * Update endpoints/*path operations* to simplify code and use new utilities, prefix and tags in `include_router`.
- * Update testing utils.
- * Update linting rules, relax vulture to reduce false positives.
- * Update migrations to include new Items.
- * Update project README.md with tips about how to start with backend.
+**Note**: the `--trust` option is necessary so Copier can execute a [post-creation script](https://github.com/fastapi/full-stack-fastapi-template/blob/master/.copier/update_dotenv.py) that updates your `.env` files.
-* Upgrade Python to 3.7 as Celery is now compatible too. PR #10 by @ebreton.
+### Input Variables
-### 0.2.2
+Copier will ask you for some data that you might want to have at hand before generating the project.
-* Fix frontend hijacking /docs in development. Using latest https://github.com/tiangolo/node-frontend with custom Nginx configs in frontend. PR #6.
+But don't worry, you can just update any of that in the `.env` files afterwards.
-### 0.2.1
+The input variables, with their default values (some auto-generated), are:
-* Fix documentation for *path operation* to get user by ID. PR #4 by @mpclarkson in FastAPI.
+- `project_name`: (default: `"FastAPI Project"`) The name of the project, shown to API users (in .env).
+- `stack_name`: (default: `"fastapi-project"`) The name of the stack used for Docker Compose labels and project name (no spaces, no periods) (in .env).
+- `secret_key`: (default: `"changethis"`) The secret key for the project, used for security, stored in .env; you can generate one with the method above or with the sketch after this list.
+- `first_superuser`: (default: `"admin@example.com"`) The email of the first superuser (in .env).
+- `first_superuser_password`: (default: `"changethis"`) The password of the first superuser (in .env).
+- `smtp_host`: (default: "") The SMTP server host to send emails, you can set it later in .env.
+- `smtp_user`: (default: "") The SMTP server user to send emails, you can set it later in .env.
+- `smtp_password`: (default: "") The SMTP server password to send emails, you can set it later in .env.
+- `emails_from_email`: (default: `"info@example.com"`) The email account to send emails from, you can set it later in .env.
+- `postgres_password`: (default: `"changethis"`) The password for the PostgreSQL database, stored in .env; you can generate one with the method above.
+- `sentry_dsn`: (default: "") The DSN for Sentry, if you are using it, you can set it later in .env.
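+If you need to generate a value for `secret_key` or `postgres_password` and don't have the method mentioned above at hand, a minimal sketch using only the Python standard library (one common option, not the only one) is:
+
+```python
+# Print a URL-safe random string you can paste into .env
+import secrets
+
+print(secrets.token_urlsafe(32))
+```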
-* Set `/start-reload.sh` as a command override for development by default.
+## Backend Development
-* Update generated README.
+Backend docs: [backend/README.md](./backend/README.md).
-### 0.2.0
+## Frontend Development
-**PR #2**:
+Frontend docs: [frontend/README.md](./frontend/README.md).
-* Simplify and update backend `Dockerfile`s.
-* Refactor and simplify backend code, improve naming, imports, modules and "namespaces".
-* Improve and simplify Vuex integration with TypeScript accessors.
-* Standardize frontend components layout, buttons order, etc.
-* Add local development scripts (to develop this project generator itself).
-* Add logs to startup modules to detect errors early.
-* Improve FastAPI dependency utilities, to simplify and reduce code (to require a superuser).
+## Deployment
-### 0.1.2
+Deployment docs: [deployment.md](./deployment.md).
-* Fix path operation to update self-user, set parameters as body payload.
+## Development
-### 0.1.1
+General development docs: [development.md](./development.md).
-Several bug fixes since initial publication, including:
+This includes using Docker Compose, custom local domains, `.env` configurations, etc.
+
+## Release Notes
-* Order of path operations for users.
-* Frontend sending login data in the correct format.
-* Add https://localhost variants to CORS.
+Check the file [release-notes.md](./release-notes.md).
## License
-This project is licensed under the terms of the MIT license.
+The Full Stack FastAPI Template is licensed under the terms of the MIT license.
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 0000000000..0045fb8182
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,29 @@
+# Security Policy
+
+Security is very important for this project and its community.
+
+Learn more about it below.
+
+## Versions
+
+The latest version or release is supported.
+
+You are encouraged to write tests for your application and update your versions frequently after ensuring that your tests are passing. This way you will benefit from the latest features, bug fixes, and **security fixes**.
+
+## Reporting a Vulnerability
+
+If you think you found a vulnerability, and even if you are not sure about it, please report it right away by sending an email to: security@tiangolo.com. Please try to be as explicit as possible, describing all the steps and example code to reproduce the security issue.
+
+I (the author, [@tiangolo](https://twitter.com/tiangolo)) will review it thoroughly and get back to you.
+
+## Public Discussions
+
+Please refrain from publicly discussing a potential security vulnerability.
+
+It's better to discuss privately and try to find a solution first, to limit the potential impact as much as possible.
+
+---
+
+Thanks for your help!
+
+The community and I thank you for that.
diff --git a/backend/.dockerignore b/backend/.dockerignore
new file mode 100644
index 0000000000..c0de4abf73
--- /dev/null
+++ b/backend/.dockerignore
@@ -0,0 +1,8 @@
+# Python
+__pycache__
+app.egg-info
+*.pyc
+.mypy_cache
+.coverage
+htmlcov
+.venv
diff --git a/backend/.gitignore b/backend/.gitignore
new file mode 100644
index 0000000000..63f67bcd21
--- /dev/null
+++ b/backend/.gitignore
@@ -0,0 +1,8 @@
+__pycache__
+app.egg-info
+*.pyc
+.mypy_cache
+.coverage
+htmlcov
+.cache
+.venv
diff --git a/backend/Dockerfile b/backend/Dockerfile
new file mode 100644
index 0000000000..9f31dcd78a
--- /dev/null
+++ b/backend/Dockerfile
@@ -0,0 +1,45 @@
+FROM python:3.10
+
+ENV PYTHONUNBUFFERED=1
+
+# Install uv
+# Ref: https://docs.astral.sh/uv/guides/integration/docker/#installing-uv
+COPY --from=ghcr.io/astral-sh/uv:0.9.26 /uv /uvx /bin/
+
+# Compile bytecode
+# Ref: https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode
+ENV UV_COMPILE_BYTECODE=1
+
+# uv Cache
+# Ref: https://docs.astral.sh/uv/guides/integration/docker/#caching
+ENV UV_LINK_MODE=copy
+
+WORKDIR /app/
+
+# Place executables in the environment at the front of the path
+# Ref: https://docs.astral.sh/uv/guides/integration/docker/#using-the-environment
+ENV PATH="/app/.venv/bin:$PATH"
+
+# Install dependencies
+# Ref: https://docs.astral.sh/uv/guides/integration/docker/#intermediate-layers
+RUN --mount=type=cache,target=/root/.cache/uv \
+ --mount=type=bind,source=uv.lock,target=uv.lock \
+ --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
+ uv sync --frozen --no-install-workspace --package app
+
+COPY ./backend/scripts /app/backend/scripts
+
+COPY ./backend/pyproject.toml ./backend/alembic.ini /app/backend/
+
+COPY ./backend/app /app/backend/app
+
+# Sync the project
+# Ref: https://docs.astral.sh/uv/guides/integration/docker/#intermediate-layers
+RUN --mount=type=cache,target=/root/.cache/uv \
+ --mount=type=bind,source=uv.lock,target=uv.lock \
+ --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
+ uv sync --frozen --package app
+
+WORKDIR /app/backend/
+
+CMD ["fastapi", "run", "--workers", "4", "app/main.py"]
diff --git a/backend/README.md b/backend/README.md
new file mode 100644
index 0000000000..521ee3fa0a
--- /dev/null
+++ b/backend/README.md
@@ -0,0 +1,172 @@
+# FastAPI Project - Backend
+
+## Requirements
+
+* [Docker](https://www.docker.com/).
+* [uv](https://docs.astral.sh/uv/) for Python package and environment management.
+
+## Docker Compose
+
+Start the local development environment with Docker Compose following the guide in [../development.md](../development.md).
+
+## General Workflow
+
+By default, the dependencies are managed with [uv](https://docs.astral.sh/uv/); go there and install it.
+
+From `./backend/` you can install all the dependencies with:
+
+```console
+$ uv sync
+```
+
+Then you can activate the virtual environment with:
+
+```console
+$ source .venv/bin/activate
+```
+
+Make sure your editor is using the correct Python virtual environment, with the interpreter at `backend/.venv/bin/python`.
+
+Modify or add SQLModel models for data and SQL tables in `./backend/app/models.py` (see the sketch below), API endpoints in `./backend/app/api/`, and CRUD (Create, Read, Update, Delete) utils in `./backend/app/crud.py`.
+
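+As an illustration, here is a minimal sketch of a new model you might add to `./backend/app/models.py` (the `Hero` name and fields are hypothetical, shown only to illustrate the SQLModel pattern used by the existing `User` and `Item` models):
+
+```python
+import uuid
+
+from sqlmodel import Field, SQLModel
+
+
+class Hero(SQLModel, table=True):
+    # Hypothetical example table, following the same pattern as the existing models
+    id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True)
+    name: str = Field(max_length=255, index=True)
+```
+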
+## VS Code
+
+There are already configurations in place to run the backend through the VS Code debugger, so that you can use breakpoints, pause and explore variables, etc.
+
+The setup is also already configured so you can run the tests through the VS Code Python tests tab.
+
+## Docker Compose Override
+
+During development, you can change Docker Compose settings that will only affect the local development environment in the file `compose.override.yml`.
+
+The changes to that file only affect the local development environment, not the production environment. So, you can add "temporary" changes that help the development workflow.
+
+For example, the directory with the backend code is synchronized into the Docker container, copying the code you change live to the directory inside the container. That allows you to test your changes right away, without having to build the Docker image again. This should only be done during development; for production, you should build the Docker image with a recent version of the backend code. But during development, it allows you to iterate very fast.
+
+There is also a command override that runs `fastapi run --reload` instead of the default `fastapi run`. It starts a single server process (instead of multiple, as there would be in production) and reloads the process whenever the code changes. Keep in mind that if you have a syntax error and save the Python file, it will break and exit, and the container will stop. After that, you can restart the container by fixing the error and running again:
+
+```console
+$ docker compose watch
+```
+
+There is also a commented-out `command` override; you can uncomment it and comment out the default one. It makes the backend container run a process that does "nothing" but keeps the container alive. That allows you to get inside your running container and execute commands inside, for example a Python interpreter to test installed dependencies, or start the development server that reloads when it detects changes.
+
+To get inside the container with a `bash` session you can start the stack with:
+
+```console
+$ docker compose watch
+```
+
+and then in another terminal, `exec` inside the running container:
+
+```console
+$ docker compose exec backend bash
+```
+
+You should see an output like:
+
+```console
+root@7f2607af31c3:/app#
+```
+
+that means you are in a `bash` session inside your container, as the `root` user, under the `/app` directory. This directory has another directory called "app" inside; that's where your code lives inside the container: `/app/app`.
+
+There you can use the `fastapi run --reload` command to run the debug live reloading server.
+
+```console
+$ fastapi run --reload app/main.py
+```
+
+...it will look like:
+
+```console
+root@7f2607af31c3:/app# fastapi run --reload app/main.py
+```
+
+and then hit enter. That runs the live reloading server that auto reloads when it detects code changes.
+
+Nevertheless, if it detects a syntax error instead of a valid change, it will just stop with an error. But as the container is still alive and you are in a Bash session, you can quickly restart it after fixing the error, running the same command ("up arrow" and "Enter").
+
+...this previous detail is what makes it useful to have the container alive doing nothing and then, in a Bash session, run the live reload server.
+
+## Backend tests
+
+To test the backend run:
+
+```console
+$ bash ./scripts/test.sh
+```
+
+The tests run with Pytest; modify and add tests to `./backend/tests/`.
+
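+As a reference, here is a minimal, hedged sketch of a test (it assumes the API router is mounted under `settings.API_V1_STR` and that the usual environment is configured; the project likely provides test fixtures, so adapt this to the existing conventions):
+
+```python
+from fastapi.testclient import TestClient
+
+from app.core.config import settings
+from app.main import app
+
+client = TestClient(app)
+
+
+def test_health_check() -> None:
+    # Calls the health-check endpoint defined in app/api/routes/utils.py
+    response = client.get(f"{settings.API_V1_STR}/utils/health-check/")
+    assert response.status_code == 200
+    assert response.json() is True
+```
+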
+If you use GitHub Actions, the tests will run automatically.
+
+### Test running stack
+
+If your stack is already up and you just want to run the tests, you can use:
+
+```bash
+docker compose exec backend bash scripts/tests-start.sh
+```
+
+That `/app/scripts/tests-start.sh` script just calls `pytest` after making sure that the rest of the stack is running. If you need to pass extra arguments to `pytest`, you can pass them to that command and they will be forwarded.
+
+For example, to stop on first error:
+
+```bash
+docker compose exec backend bash scripts/tests-start.sh -x
+```
+
+### Test Coverage
+
+When the tests are run, a file `htmlcov/index.html` is generated; you can open it in your browser to see the coverage of the tests.
+
+## Migrations
+
+As during local development your app directory is mounted as a volume inside the container, you can also run the migrations with `alembic` commands inside the container, and the migration code will be in your app directory (instead of being only inside the container), so you can add it to your git repository.
+
+Make sure you create a "revision" of your models and that you "upgrade" your database with that revision every time you change them, as this is what will update the tables in your database. Otherwise, your application will have errors.
+
+* Start an interactive session in the backend container:
+
+```console
+$ docker compose exec backend bash
+```
+
+* Alembic is already configured to import your SQLModel models from `./backend/app/models.py`.
+
+* After changing a model (for example, adding a column), inside the container, create a revision, e.g.:
+
+```console
+$ alembic revision --autogenerate -m "Add column last_name to User model"
+```
+
+* Commit to the git repository the files generated in the alembic directory.
+
+* After creating the revision, run the migration in the database (this is what will actually change the database):
+
+```console
+$ alembic upgrade head
+```
+
+If you don't want to use migrations at all, uncomment the lines in the file at `./backend/app/core/db.py` that end in:
+
+```python
+SQLModel.metadata.create_all(engine)
+```
+
+and comment the line in the file `scripts/prestart.sh` that contains:
+
+```console
+$ alembic upgrade head
+```
+
+If you don't want to start with the default models and want to remove or modify them from the beginning, without keeping any previous revision, you can remove the revision files (`.py` Python files) under `./backend/app/alembic/versions/` and then create a first migration as described above.
+
+## Email Templates
+
+The email templates are in `./backend/app/email-templates/`. This directory has two subdirectories: `build` and `src`. The `src` directory contains the source files used to build the final email templates. The `build` directory contains the final email templates used by the application.
+
+Before continuing, ensure you have the [MJML extension](https://github.com/mjmlio/vscode-mjml) installed in your VS Code.
+
+Once you have the MJML extension installed, you can create a new email template in the `src` directory. After creating the new email template, with the `.mjml` file open in your editor, open the command palette with `Ctrl+Shift+P` and search for `MJML: Export to HTML`. This will convert the `.mjml` file to a `.html` file, which you can then save in the `build` directory.
diff --git a/{{cookiecutter.project_slug}}/backend/app/alembic.ini b/backend/alembic.ini
similarity index 98%
rename from {{cookiecutter.project_slug}}/backend/app/alembic.ini
rename to backend/alembic.ini
index 921aaf17b8..24841c2bfb 100755
--- a/{{cookiecutter.project_slug}}/backend/app/alembic.ini
+++ b/backend/alembic.ini
@@ -2,7 +2,7 @@
[alembic]
# path to migration scripts
-script_location = alembic
+script_location = app/alembic
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
diff --git a/{{cookiecutter.project_slug}}/backend/app/app/__init__.py b/backend/app/__init__.py
similarity index 100%
rename from {{cookiecutter.project_slug}}/backend/app/app/__init__.py
rename to backend/app/__init__.py
diff --git a/backend/app/alembic/README b/backend/app/alembic/README
new file mode 100755
index 0000000000..2500aa1bcf
--- /dev/null
+++ b/backend/app/alembic/README
@@ -0,0 +1 @@
+Generic single-database configuration.
diff --git a/{{cookiecutter.project_slug}}/backend/app/alembic/env.py b/backend/app/alembic/env.py
similarity index 82%
rename from {{cookiecutter.project_slug}}/backend/app/alembic/env.py
rename to backend/app/alembic/env.py
index 3ba3420643..7f29c04680 100755
--- a/{{cookiecutter.project_slug}}/backend/app/alembic/env.py
+++ b/backend/app/alembic/env.py
@@ -1,10 +1,8 @@
-from __future__ import with_statement
-
import os
+from logging.config import fileConfig
from alembic import context
from sqlalchemy import engine_from_config, pool
-from logging.config import fileConfig
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
@@ -20,9 +18,10 @@
# target_metadata = mymodel.Base.metadata
# target_metadata = None
-from app.db.base import Base # noqa
+from app.models import SQLModel # noqa
+from app.core.config import settings # noqa
-target_metadata = Base.metadata
+target_metadata = SQLModel.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
@@ -31,11 +30,7 @@
def get_url():
- user = os.getenv("POSTGRES_USER", "postgres")
- password = os.getenv("POSTGRES_PASSWORD", "")
- server = os.getenv("POSTGRES_SERVER", "db")
- db = os.getenv("POSTGRES_DB", "app")
- return f"postgresql://{user}:{password}@{server}/{db}"
+ return str(settings.SQLALCHEMY_DATABASE_URI)
def run_migrations_offline():
@@ -69,7 +64,9 @@ def run_migrations_online():
configuration = config.get_section(config.config_ini_section)
configuration["sqlalchemy.url"] = get_url()
connectable = engine_from_config(
- configuration, prefix="sqlalchemy.", poolclass=pool.NullPool,
+ configuration,
+ prefix="sqlalchemy.",
+ poolclass=pool.NullPool,
)
with connectable.connect() as connection:
diff --git a/{{cookiecutter.project_slug}}/backend/app/alembic/script.py.mako b/backend/app/alembic/script.py.mako
similarity index 94%
rename from {{cookiecutter.project_slug}}/backend/app/alembic/script.py.mako
rename to backend/app/alembic/script.py.mako
index 2c0156303a..217a9a8b7b 100755
--- a/{{cookiecutter.project_slug}}/backend/app/alembic/script.py.mako
+++ b/backend/app/alembic/script.py.mako
@@ -7,6 +7,7 @@ Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
+import sqlmodel.sql.sqltypes
${imports if imports else ""}
# revision identifiers, used by Alembic.
diff --git a/{{cookiecutter.project_slug}}/backend/app/alembic/versions/.keep b/backend/app/alembic/versions/.keep
similarity index 100%
rename from {{cookiecutter.project_slug}}/backend/app/alembic/versions/.keep
rename to backend/app/alembic/versions/.keep
diff --git a/backend/app/alembic/versions/1a31ce608336_add_cascade_delete_relationships.py b/backend/app/alembic/versions/1a31ce608336_add_cascade_delete_relationships.py
new file mode 100644
index 0000000000..10e47a1456
--- /dev/null
+++ b/backend/app/alembic/versions/1a31ce608336_add_cascade_delete_relationships.py
@@ -0,0 +1,37 @@
+"""Add cascade delete relationships
+
+Revision ID: 1a31ce608336
+Revises: d98dd8ec85a3
+Create Date: 2024-07-31 22:24:34.447891
+
+"""
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel.sql.sqltypes
+
+
+# revision identifiers, used by Alembic.
+revision = '1a31ce608336'
+down_revision = 'd98dd8ec85a3'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.alter_column('item', 'owner_id',
+ existing_type=sa.UUID(),
+ nullable=False)
+ op.drop_constraint('item_owner_id_fkey', 'item', type_='foreignkey')
+ op.create_foreign_key(None, 'item', 'user', ['owner_id'], ['id'], ondelete='CASCADE')
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_constraint(None, 'item', type_='foreignkey')
+ op.create_foreign_key('item_owner_id_fkey', 'item', 'user', ['owner_id'], ['id'])
+ op.alter_column('item', 'owner_id',
+ existing_type=sa.UUID(),
+ nullable=True)
+ # ### end Alembic commands ###
diff --git a/backend/app/alembic/versions/9c0a54914c78_add_max_length_for_string_varchar_.py b/backend/app/alembic/versions/9c0a54914c78_add_max_length_for_string_varchar_.py
new file mode 100755
index 0000000000..78a41773b9
--- /dev/null
+++ b/backend/app/alembic/versions/9c0a54914c78_add_max_length_for_string_varchar_.py
@@ -0,0 +1,69 @@
+"""Add max length for string(varchar) fields in User and Items models
+
+Revision ID: 9c0a54914c78
+Revises: e2412789c190
+Create Date: 2024-06-17 14:42:44.639457
+
+"""
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel.sql.sqltypes
+
+
+# revision identifiers, used by Alembic.
+revision = '9c0a54914c78'
+down_revision = 'e2412789c190'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # Adjust the length of the email field in the User table
+ op.alter_column('user', 'email',
+ existing_type=sa.String(),
+ type_=sa.String(length=255),
+ existing_nullable=False)
+
+ # Adjust the length of the full_name field in the User table
+ op.alter_column('user', 'full_name',
+ existing_type=sa.String(),
+ type_=sa.String(length=255),
+ existing_nullable=True)
+
+ # Adjust the length of the title field in the Item table
+ op.alter_column('item', 'title',
+ existing_type=sa.String(),
+ type_=sa.String(length=255),
+ existing_nullable=False)
+
+ # Adjust the length of the description field in the Item table
+ op.alter_column('item', 'description',
+ existing_type=sa.String(),
+ type_=sa.String(length=255),
+ existing_nullable=True)
+
+
+def downgrade():
+ # Revert the length of the email field in the User table
+ op.alter_column('user', 'email',
+ existing_type=sa.String(length=255),
+ type_=sa.String(),
+ existing_nullable=False)
+
+ # Revert the length of the full_name field in the User table
+ op.alter_column('user', 'full_name',
+ existing_type=sa.String(length=255),
+ type_=sa.String(),
+ existing_nullable=True)
+
+ # Revert the length of the title field in the Item table
+ op.alter_column('item', 'title',
+ existing_type=sa.String(length=255),
+ type_=sa.String(),
+ existing_nullable=False)
+
+ # Revert the length of the description field in the Item table
+ op.alter_column('item', 'description',
+ existing_type=sa.String(length=255),
+ type_=sa.String(),
+ existing_nullable=True)
diff --git a/backend/app/alembic/versions/d98dd8ec85a3_edit_replace_id_integers_in_all_models_.py b/backend/app/alembic/versions/d98dd8ec85a3_edit_replace_id_integers_in_all_models_.py
new file mode 100755
index 0000000000..37af1fa215
--- /dev/null
+++ b/backend/app/alembic/versions/d98dd8ec85a3_edit_replace_id_integers_in_all_models_.py
@@ -0,0 +1,90 @@
+"""Edit replace id integers in all models to use UUID instead
+
+Revision ID: d98dd8ec85a3
+Revises: 9c0a54914c78
+Create Date: 2024-07-19 04:08:04.000976
+
+"""
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel.sql.sqltypes
+from sqlalchemy.dialects import postgresql
+
+
+# revision identifiers, used by Alembic.
+revision = 'd98dd8ec85a3'
+down_revision = '9c0a54914c78'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # Ensure uuid-ossp extension is available
+ op.execute('CREATE EXTENSION IF NOT EXISTS "uuid-ossp"')
+
+ # Create a new UUID column with a default UUID value
+ op.add_column('user', sa.Column('new_id', postgresql.UUID(as_uuid=True), default=sa.text('uuid_generate_v4()')))
+ op.add_column('item', sa.Column('new_id', postgresql.UUID(as_uuid=True), default=sa.text('uuid_generate_v4()')))
+ op.add_column('item', sa.Column('new_owner_id', postgresql.UUID(as_uuid=True), nullable=True))
+
+ # Populate the new columns with UUIDs
+ op.execute('UPDATE "user" SET new_id = uuid_generate_v4()')
+ op.execute('UPDATE item SET new_id = uuid_generate_v4()')
+ op.execute('UPDATE item SET new_owner_id = (SELECT new_id FROM "user" WHERE "user".id = item.owner_id)')
+
+ # Set the new_id as not nullable
+ op.alter_column('user', 'new_id', nullable=False)
+ op.alter_column('item', 'new_id', nullable=False)
+
+ # Drop old columns and rename new columns
+ op.drop_constraint('item_owner_id_fkey', 'item', type_='foreignkey')
+ op.drop_column('item', 'owner_id')
+ op.alter_column('item', 'new_owner_id', new_column_name='owner_id')
+
+ op.drop_column('user', 'id')
+ op.alter_column('user', 'new_id', new_column_name='id')
+
+ op.drop_column('item', 'id')
+ op.alter_column('item', 'new_id', new_column_name='id')
+
+ # Create primary key constraint
+ op.create_primary_key('user_pkey', 'user', ['id'])
+ op.create_primary_key('item_pkey', 'item', ['id'])
+
+ # Recreate foreign key constraint
+ op.create_foreign_key('item_owner_id_fkey', 'item', 'user', ['owner_id'], ['id'])
+
+def downgrade():
+ # Reverse the upgrade process
+ op.add_column('user', sa.Column('old_id', sa.Integer, autoincrement=True))
+ op.add_column('item', sa.Column('old_id', sa.Integer, autoincrement=True))
+ op.add_column('item', sa.Column('old_owner_id', sa.Integer, nullable=True))
+
+ # Populate the old columns with default values
+ # Generate sequences for the integer IDs if not exist
+ op.execute('CREATE SEQUENCE IF NOT EXISTS user_id_seq AS INTEGER OWNED BY "user".old_id')
+ op.execute('CREATE SEQUENCE IF NOT EXISTS item_id_seq AS INTEGER OWNED BY item.old_id')
+
+ op.execute('SELECT setval(\'user_id_seq\', COALESCE((SELECT MAX(old_id) + 1 FROM "user"), 1), false)')
+ op.execute('SELECT setval(\'item_id_seq\', COALESCE((SELECT MAX(old_id) + 1 FROM item), 1), false)')
+
+ op.execute('UPDATE "user" SET old_id = nextval(\'user_id_seq\')')
+ op.execute('UPDATE item SET old_id = nextval(\'item_id_seq\'), old_owner_id = (SELECT old_id FROM "user" WHERE "user".id = item.owner_id)')
+
+ # Drop new columns and rename old columns back
+ op.drop_constraint('item_owner_id_fkey', 'item', type_='foreignkey')
+ op.drop_column('item', 'owner_id')
+ op.alter_column('item', 'old_owner_id', new_column_name='owner_id')
+
+ op.drop_column('user', 'id')
+ op.alter_column('user', 'old_id', new_column_name='id')
+
+ op.drop_column('item', 'id')
+ op.alter_column('item', 'old_id', new_column_name='id')
+
+ # Create primary key constraint
+ op.create_primary_key('user_pkey', 'user', ['id'])
+ op.create_primary_key('item_pkey', 'item', ['id'])
+
+ # Recreate foreign key constraint
+ op.create_foreign_key('item_owner_id_fkey', 'item', 'user', ['owner_id'], ['id'])
diff --git a/backend/app/alembic/versions/e2412789c190_initialize_models.py b/backend/app/alembic/versions/e2412789c190_initialize_models.py
new file mode 100644
index 0000000000..7529ea91fa
--- /dev/null
+++ b/backend/app/alembic/versions/e2412789c190_initialize_models.py
@@ -0,0 +1,54 @@
+"""Initialize models
+
+Revision ID: e2412789c190
+Revises:
+Create Date: 2023-11-24 22:55:43.195942
+
+"""
+import sqlalchemy as sa
+import sqlmodel.sql.sqltypes
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "e2412789c190"
+down_revision = None
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.create_table(
+ "user",
+ sa.Column("email", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+ sa.Column("is_active", sa.Boolean(), nullable=False),
+ sa.Column("is_superuser", sa.Boolean(), nullable=False),
+ sa.Column("full_name", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+ sa.Column("id", sa.Integer(), nullable=False),
+ sa.Column(
+ "hashed_password", sqlmodel.sql.sqltypes.AutoString(), nullable=False
+ ),
+ sa.PrimaryKeyConstraint("id"),
+ )
+ op.create_index(op.f("ix_user_email"), "user", ["email"], unique=True)
+ op.create_table(
+ "item",
+ sa.Column("description", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+ sa.Column("id", sa.Integer(), nullable=False),
+ sa.Column("title", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+ sa.Column("owner_id", sa.Integer(), nullable=False),
+ sa.ForeignKeyConstraint(
+ ["owner_id"],
+ ["user.id"],
+ ),
+ sa.PrimaryKeyConstraint("id"),
+ )
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_table("item")
+ op.drop_index(op.f("ix_user_email"), table_name="user")
+ op.drop_table("user")
+ # ### end Alembic commands ###
diff --git a/backend/app/alembic/versions/fe56fa70289e_add_created_at_to_user_and_item.py b/backend/app/alembic/versions/fe56fa70289e_add_created_at_to_user_and_item.py
new file mode 100644
index 0000000000..3e15754825
--- /dev/null
+++ b/backend/app/alembic/versions/fe56fa70289e_add_created_at_to_user_and_item.py
@@ -0,0 +1,31 @@
+"""Add created_at to User and Item
+
+Revision ID: fe56fa70289e
+Revises: 1a31ce608336
+Create Date: 2026-01-23 15:50:37.171462
+
+"""
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel.sql.sqltypes
+
+
+# revision identifiers, used by Alembic.
+revision = 'fe56fa70289e'
+down_revision = '1a31ce608336'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.add_column('item', sa.Column('created_at', sa.DateTime(timezone=True), nullable=True))
+ op.add_column('user', sa.Column('created_at', sa.DateTime(timezone=True), nullable=True))
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_column('user', 'created_at')
+ op.drop_column('item', 'created_at')
+ # ### end Alembic commands ###
diff --git a/{{cookiecutter.project_slug}}/backend/app/app/api/__init__.py b/backend/app/api/__init__.py
similarity index 100%
rename from {{cookiecutter.project_slug}}/backend/app/app/api/__init__.py
rename to backend/app/api/__init__.py
diff --git a/backend/app/api/deps.py b/backend/app/api/deps.py
new file mode 100644
index 0000000000..c2b83c841d
--- /dev/null
+++ b/backend/app/api/deps.py
@@ -0,0 +1,57 @@
+from collections.abc import Generator
+from typing import Annotated
+
+import jwt
+from fastapi import Depends, HTTPException, status
+from fastapi.security import OAuth2PasswordBearer
+from jwt.exceptions import InvalidTokenError
+from pydantic import ValidationError
+from sqlmodel import Session
+
+from app.core import security
+from app.core.config import settings
+from app.core.db import engine
+from app.models import TokenPayload, User
+
+reusable_oauth2 = OAuth2PasswordBearer(
+ tokenUrl=f"{settings.API_V1_STR}/login/access-token"
+)
+
+
+def get_db() -> Generator[Session, None, None]:
+ with Session(engine) as session:
+ yield session
+
+
+SessionDep = Annotated[Session, Depends(get_db)]
+TokenDep = Annotated[str, Depends(reusable_oauth2)]
+
+
+def get_current_user(session: SessionDep, token: TokenDep) -> User:
+ try:
+ payload = jwt.decode(
+ token, settings.SECRET_KEY, algorithms=[security.ALGORITHM]
+ )
+ token_data = TokenPayload(**payload)
+ except (InvalidTokenError, ValidationError):
+ raise HTTPException(
+ status_code=status.HTTP_403_FORBIDDEN,
+ detail="Could not validate credentials",
+ )
+ user = session.get(User, token_data.sub)
+ if not user:
+ raise HTTPException(status_code=404, detail="User not found")
+ if not user.is_active:
+ raise HTTPException(status_code=400, detail="Inactive user")
+ return user
+
+
+CurrentUser = Annotated[User, Depends(get_current_user)]
+
+
+def get_current_active_superuser(current_user: CurrentUser) -> User:
+ if not current_user.is_superuser:
+ raise HTTPException(
+ status_code=403, detail="The user doesn't have enough privileges"
+ )
+ return current_user
diff --git a/backend/app/api/main.py b/backend/app/api/main.py
new file mode 100644
index 0000000000..eac18c8e8f
--- /dev/null
+++ b/backend/app/api/main.py
@@ -0,0 +1,14 @@
+from fastapi import APIRouter
+
+from app.api.routes import items, login, private, users, utils
+from app.core.config import settings
+
+api_router = APIRouter()
+api_router.include_router(login.router)
+api_router.include_router(users.router)
+api_router.include_router(utils.router)
+api_router.include_router(items.router)
+
+
+if settings.ENVIRONMENT == "local":
+ api_router.include_router(private.router)
diff --git a/{{cookiecutter.project_slug}}/backend/app/app/api/api_v1/__init__.py b/backend/app/api/routes/__init__.py
similarity index 100%
rename from {{cookiecutter.project_slug}}/backend/app/app/api/api_v1/__init__.py
rename to backend/app/api/routes/__init__.py
diff --git a/backend/app/api/routes/items.py b/backend/app/api/routes/items.py
new file mode 100644
index 0000000000..2b2ce57772
--- /dev/null
+++ b/backend/app/api/routes/items.py
@@ -0,0 +1,112 @@
+import uuid
+from typing import Any
+
+from fastapi import APIRouter, HTTPException
+from sqlmodel import func, select
+
+from app.api.deps import CurrentUser, SessionDep
+from app.models import Item, ItemCreate, ItemPublic, ItemsPublic, ItemUpdate, Message
+
+router = APIRouter(prefix="/items", tags=["items"])
+
+
+@router.get("/", response_model=ItemsPublic)
+def read_items(
+ session: SessionDep, current_user: CurrentUser, skip: int = 0, limit: int = 100
+) -> Any:
+ """
+ Retrieve items.
+ """
+
+ if current_user.is_superuser:
+ count_statement = select(func.count()).select_from(Item)
+ count = session.exec(count_statement).one()
+ statement = (
+ select(Item).order_by(Item.created_at.desc()).offset(skip).limit(limit)
+ )
+ items = session.exec(statement).all()
+ else:
+ count_statement = (
+ select(func.count())
+ .select_from(Item)
+ .where(Item.owner_id == current_user.id)
+ )
+ count = session.exec(count_statement).one()
+ statement = (
+ select(Item)
+ .where(Item.owner_id == current_user.id)
+ .order_by(Item.created_at.desc())
+ .offset(skip)
+ .limit(limit)
+ )
+ items = session.exec(statement).all()
+
+ return ItemsPublic(data=items, count=count)
+
+
+@router.get("/{id}", response_model=ItemPublic)
+def read_item(session: SessionDep, current_user: CurrentUser, id: uuid.UUID) -> Any:
+ """
+ Get item by ID.
+ """
+ item = session.get(Item, id)
+ if not item:
+ raise HTTPException(status_code=404, detail="Item not found")
+ if not current_user.is_superuser and (item.owner_id != current_user.id):
+ raise HTTPException(status_code=403, detail="Not enough permissions")
+ return item
+
+
+@router.post("/", response_model=ItemPublic)
+def create_item(
+ *, session: SessionDep, current_user: CurrentUser, item_in: ItemCreate
+) -> Any:
+ """
+ Create new item.
+ """
+ item = Item.model_validate(item_in, update={"owner_id": current_user.id})
+ session.add(item)
+ session.commit()
+ session.refresh(item)
+ return item
+
+
+@router.put("/{id}", response_model=ItemPublic)
+def update_item(
+ *,
+ session: SessionDep,
+ current_user: CurrentUser,
+ id: uuid.UUID,
+ item_in: ItemUpdate,
+) -> Any:
+ """
+ Update an item.
+ """
+ item = session.get(Item, id)
+ if not item:
+ raise HTTPException(status_code=404, detail="Item not found")
+ if not current_user.is_superuser and (item.owner_id != current_user.id):
+ raise HTTPException(status_code=403, detail="Not enough permissions")
+ update_dict = item_in.model_dump(exclude_unset=True)
+ item.sqlmodel_update(update_dict)
+ session.add(item)
+ session.commit()
+ session.refresh(item)
+ return item
+
+
+@router.delete("/{id}")
+def delete_item(
+ session: SessionDep, current_user: CurrentUser, id: uuid.UUID
+) -> Message:
+ """
+ Delete an item.
+ """
+ item = session.get(Item, id)
+ if not item:
+ raise HTTPException(status_code=404, detail="Item not found")
+ if not current_user.is_superuser and (item.owner_id != current_user.id):
+ raise HTTPException(status_code=403, detail="Not enough permissions")
+ session.delete(item)
+ session.commit()
+ return Message(message="Item deleted successfully")
diff --git a/backend/app/api/routes/login.py b/backend/app/api/routes/login.py
new file mode 100644
index 0000000000..58441e37e9
--- /dev/null
+++ b/backend/app/api/routes/login.py
@@ -0,0 +1,123 @@
+from datetime import timedelta
+from typing import Annotated, Any
+
+from fastapi import APIRouter, Depends, HTTPException
+from fastapi.responses import HTMLResponse
+from fastapi.security import OAuth2PasswordRequestForm
+
+from app import crud
+from app.api.deps import CurrentUser, SessionDep, get_current_active_superuser
+from app.core import security
+from app.core.config import settings
+from app.models import Message, NewPassword, Token, UserPublic, UserUpdate
+from app.utils import (
+ generate_password_reset_token,
+ generate_reset_password_email,
+ send_email,
+ verify_password_reset_token,
+)
+
+router = APIRouter(tags=["login"])
+
+
+@router.post("/login/access-token")
+def login_access_token(
+ session: SessionDep, form_data: Annotated[OAuth2PasswordRequestForm, Depends()]
+) -> Token:
+ """
+ OAuth2 compatible token login, get an access token for future requests
+ """
+ user = crud.authenticate(
+ session=session, email=form_data.username, password=form_data.password
+ )
+ if not user:
+ raise HTTPException(status_code=400, detail="Incorrect email or password")
+ elif not user.is_active:
+ raise HTTPException(status_code=400, detail="Inactive user")
+ access_token_expires = timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
+ return Token(
+ access_token=security.create_access_token(
+ user.id, expires_delta=access_token_expires
+ )
+ )
+
+
+@router.post("/login/test-token", response_model=UserPublic)
+def test_token(current_user: CurrentUser) -> Any:
+ """
+ Test access token
+ """
+ return current_user
+
+
+@router.post("/password-recovery/{email}")
+def recover_password(email: str, session: SessionDep) -> Message:
+ """
+ Password Recovery
+ """
+ user = crud.get_user_by_email(session=session, email=email)
+
+ # Always return the same response to prevent email enumeration attacks
+ # Only send email if user actually exists
+ if user:
+ password_reset_token = generate_password_reset_token(email=email)
+ email_data = generate_reset_password_email(
+ email_to=user.email, email=email, token=password_reset_token
+ )
+ send_email(
+ email_to=user.email,
+ subject=email_data.subject,
+ html_content=email_data.html_content,
+ )
+ return Message(
+ message="If that email is registered, we sent a password recovery link"
+ )
+
+
+@router.post("/reset-password/")
+def reset_password(session: SessionDep, body: NewPassword) -> Message:
+ """
+ Reset password
+ """
+ email = verify_password_reset_token(token=body.token)
+ if not email:
+ raise HTTPException(status_code=400, detail="Invalid token")
+ user = crud.get_user_by_email(session=session, email=email)
+ if not user:
+ # Don't reveal that the user doesn't exist - use same error as invalid token
+ raise HTTPException(status_code=400, detail="Invalid token")
+ elif not user.is_active:
+ raise HTTPException(status_code=400, detail="Inactive user")
+ user_in_update = UserUpdate(password=body.new_password)
+ crud.update_user(
+ session=session,
+ db_user=user,
+ user_in=user_in_update,
+ )
+ return Message(message="Password updated successfully")
+
+
+@router.post(
+ "/password-recovery-html-content/{email}",
+ dependencies=[Depends(get_current_active_superuser)],
+ response_class=HTMLResponse,
+)
+def recover_password_html_content(email: str, session: SessionDep) -> Any:
+ """
+ HTML Content for Password Recovery
+ """
+ user = crud.get_user_by_email(session=session, email=email)
+
+ if not user:
+ raise HTTPException(
+ status_code=404,
+ detail="The user with this username does not exist in the system.",
+ )
+ password_reset_token = generate_password_reset_token(email=email)
+ email_data = generate_reset_password_email(
+ email_to=user.email, email=email, token=password_reset_token
+ )
+
+ return HTMLResponse(
+        content=email_data.html_content, headers={"subject": email_data.subject}
+ )
diff --git a/backend/app/api/routes/private.py b/backend/app/api/routes/private.py
new file mode 100644
index 0000000000..9f33ef1900
--- /dev/null
+++ b/backend/app/api/routes/private.py
@@ -0,0 +1,38 @@
+from typing import Any
+
+from fastapi import APIRouter
+from pydantic import BaseModel
+
+from app.api.deps import SessionDep
+from app.core.security import get_password_hash
+from app.models import (
+ User,
+ UserPublic,
+)
+
+router = APIRouter(tags=["private"], prefix="/private")
+
+
+class PrivateUserCreate(BaseModel):
+ email: str
+ password: str
+ full_name: str
+ is_verified: bool = False
+
+
+@router.post("/users/", response_model=UserPublic)
+def create_user(user_in: PrivateUserCreate, session: SessionDep) -> Any:
+ """
+ Create a new user.
+ """
+
+ user = User(
+ email=user_in.email,
+ full_name=user_in.full_name,
+ hashed_password=get_password_hash(user_in.password),
+ )
+
+ session.add(user)
+ session.commit()
+
+ return user
diff --git a/backend/app/api/routes/users.py b/backend/app/api/routes/users.py
new file mode 100644
index 0000000000..61727949c8
--- /dev/null
+++ b/backend/app/api/routes/users.py
@@ -0,0 +1,229 @@
+import uuid
+from typing import Any
+
+from fastapi import APIRouter, Depends, HTTPException
+from sqlmodel import col, delete, func, select
+
+from app import crud
+from app.api.deps import (
+ CurrentUser,
+ SessionDep,
+ get_current_active_superuser,
+)
+from app.core.config import settings
+from app.core.security import get_password_hash, verify_password
+from app.models import (
+ Item,
+ Message,
+ UpdatePassword,
+ User,
+ UserCreate,
+ UserPublic,
+ UserRegister,
+ UsersPublic,
+ UserUpdate,
+ UserUpdateMe,
+)
+from app.utils import generate_new_account_email, send_email
+
+router = APIRouter(prefix="/users", tags=["users"])
+
+
+@router.get(
+ "/",
+ dependencies=[Depends(get_current_active_superuser)],
+ response_model=UsersPublic,
+)
+def read_users(session: SessionDep, skip: int = 0, limit: int = 100) -> Any:
+ """
+ Retrieve users.
+ """
+
+ count_statement = select(func.count()).select_from(User)
+ count = session.exec(count_statement).one()
+
+ statement = select(User).order_by(User.created_at.desc()).offset(skip).limit(limit)
+ users = session.exec(statement).all()
+
+ return UsersPublic(data=users, count=count)
+
+
+@router.post(
+ "/", dependencies=[Depends(get_current_active_superuser)], response_model=UserPublic
+)
+def create_user(*, session: SessionDep, user_in: UserCreate) -> Any:
+ """
+ Create new user.
+ """
+ user = crud.get_user_by_email(session=session, email=user_in.email)
+ if user:
+ raise HTTPException(
+ status_code=400,
+ detail="The user with this email already exists in the system.",
+ )
+
+ user = crud.create_user(session=session, user_create=user_in)
+ if settings.emails_enabled and user_in.email:
+ email_data = generate_new_account_email(
+ email_to=user_in.email, username=user_in.email, password=user_in.password
+ )
+ send_email(
+ email_to=user_in.email,
+ subject=email_data.subject,
+ html_content=email_data.html_content,
+ )
+ return user
+
+
+@router.patch("/me", response_model=UserPublic)
+def update_user_me(
+ *, session: SessionDep, user_in: UserUpdateMe, current_user: CurrentUser
+) -> Any:
+ """
+ Update own user.
+ """
+
+ if user_in.email:
+ existing_user = crud.get_user_by_email(session=session, email=user_in.email)
+ if existing_user and existing_user.id != current_user.id:
+ raise HTTPException(
+ status_code=409, detail="User with this email already exists"
+ )
+ user_data = user_in.model_dump(exclude_unset=True)
+ current_user.sqlmodel_update(user_data)
+ session.add(current_user)
+ session.commit()
+ session.refresh(current_user)
+ return current_user
+
+
+@router.patch("/me/password", response_model=Message)
+def update_password_me(
+ *, session: SessionDep, body: UpdatePassword, current_user: CurrentUser
+) -> Any:
+ """
+ Update own password.
+ """
+ verified, _ = verify_password(body.current_password, current_user.hashed_password)
+ if not verified:
+ raise HTTPException(status_code=400, detail="Incorrect password")
+ if body.current_password == body.new_password:
+ raise HTTPException(
+ status_code=400, detail="New password cannot be the same as the current one"
+ )
+ hashed_password = get_password_hash(body.new_password)
+ current_user.hashed_password = hashed_password
+ session.add(current_user)
+ session.commit()
+ return Message(message="Password updated successfully")
+
+
+@router.get("/me", response_model=UserPublic)
+def read_user_me(current_user: CurrentUser) -> Any:
+ """
+ Get current user.
+ """
+ return current_user
+
+
+@router.delete("/me", response_model=Message)
+def delete_user_me(session: SessionDep, current_user: CurrentUser) -> Any:
+ """
+ Delete own user.
+ """
+ if current_user.is_superuser:
+ raise HTTPException(
+ status_code=403, detail="Super users are not allowed to delete themselves"
+ )
+ session.delete(current_user)
+ session.commit()
+ return Message(message="User deleted successfully")
+
+
+@router.post("/signup", response_model=UserPublic)
+def register_user(session: SessionDep, user_in: UserRegister) -> Any:
+ """
+ Create new user without the need to be logged in.
+ """
+ user = crud.get_user_by_email(session=session, email=user_in.email)
+ if user:
+ raise HTTPException(
+ status_code=400,
+ detail="The user with this email already exists in the system",
+ )
+ user_create = UserCreate.model_validate(user_in)
+ user = crud.create_user(session=session, user_create=user_create)
+ return user
+
+
+@router.get("/{user_id}", response_model=UserPublic)
+def read_user_by_id(
+ user_id: uuid.UUID, session: SessionDep, current_user: CurrentUser
+) -> Any:
+ """
+ Get a specific user by id.
+ """
+ user = session.get(User, user_id)
+ if user == current_user:
+ return user
+ if not current_user.is_superuser:
+ raise HTTPException(
+ status_code=403,
+ detail="The user doesn't have enough privileges",
+ )
+ if user is None:
+ raise HTTPException(status_code=404, detail="User not found")
+ return user
+
+
+@router.patch(
+ "/{user_id}",
+ dependencies=[Depends(get_current_active_superuser)],
+ response_model=UserPublic,
+)
+def update_user(
+ *,
+ session: SessionDep,
+ user_id: uuid.UUID,
+ user_in: UserUpdate,
+) -> Any:
+ """
+ Update a user.
+ """
+
+ db_user = session.get(User, user_id)
+ if not db_user:
+ raise HTTPException(
+ status_code=404,
+ detail="The user with this id does not exist in the system",
+ )
+ if user_in.email:
+ existing_user = crud.get_user_by_email(session=session, email=user_in.email)
+ if existing_user and existing_user.id != user_id:
+ raise HTTPException(
+ status_code=409, detail="User with this email already exists"
+ )
+
+ db_user = crud.update_user(session=session, db_user=db_user, user_in=user_in)
+ return db_user
+
+
+@router.delete("/{user_id}", dependencies=[Depends(get_current_active_superuser)])
+def delete_user(
+ session: SessionDep, current_user: CurrentUser, user_id: uuid.UUID
+) -> Message:
+ """
+ Delete a user.
+ """
+ user = session.get(User, user_id)
+ if not user:
+ raise HTTPException(status_code=404, detail="User not found")
+ if user == current_user:
+ raise HTTPException(
+ status_code=403, detail="Super users are not allowed to delete themselves"
+ )
+ statement = delete(Item).where(col(Item.owner_id) == user_id)
+ session.exec(statement) # type: ignore
+ session.delete(user)
+ session.commit()
+ return Message(message="User deleted successfully")
diff --git a/backend/app/api/routes/utils.py b/backend/app/api/routes/utils.py
new file mode 100644
index 0000000000..fc093419b3
--- /dev/null
+++ b/backend/app/api/routes/utils.py
@@ -0,0 +1,31 @@
+from fastapi import APIRouter, Depends
+from pydantic.networks import EmailStr
+
+from app.api.deps import get_current_active_superuser
+from app.models import Message
+from app.utils import generate_test_email, send_email
+
+router = APIRouter(prefix="/utils", tags=["utils"])
+
+
+@router.post(
+ "/test-email/",
+ dependencies=[Depends(get_current_active_superuser)],
+ status_code=201,
+)
+def test_email(email_to: EmailStr) -> Message:
+ """
+ Test emails.
+ """
+ email_data = generate_test_email(email_to=email_to)
+ send_email(
+ email_to=email_to,
+ subject=email_data.subject,
+ html_content=email_data.html_content,
+ )
+ return Message(message="Test email sent")
+
+
+@router.get("/health-check/")
+async def health_check() -> bool:
+ return True
diff --git a/{{cookiecutter.project_slug}}/backend/app/app/backend_pre_start.py b/backend/app/backend_pre_start.py
similarity index 68%
rename from {{cookiecutter.project_slug}}/backend/app/app/backend_pre_start.py
rename to backend/app/backend_pre_start.py
index 3363a41542..c2f8e29ae1 100644
--- a/{{cookiecutter.project_slug}}/backend/app/app/backend_pre_start.py
+++ b/backend/app/backend_pre_start.py
@@ -1,8 +1,10 @@
import logging
+from sqlalchemy import Engine
+from sqlmodel import Session, select
from tenacity import after_log, before_log, retry, stop_after_attempt, wait_fixed
-from app.db.session import SessionLocal
+from app.core.db import engine
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
@@ -17,11 +19,11 @@
before=before_log(logger, logging.INFO),
after=after_log(logger, logging.WARN),
)
-def init() -> None:
+def init(db_engine: Engine) -> None:
try:
- db = SessionLocal()
- # Try to create session to check if DB is awake
- db.execute("SELECT 1")
+ with Session(db_engine) as session:
+ # Try to create session to check if DB is awake
+ session.exec(select(1))
except Exception as e:
logger.error(e)
raise e
@@ -29,7 +31,7 @@ def init() -> None:
def main() -> None:
logger.info("Initializing service")
- init()
+ init(engine)
logger.info("Service finished initializing")
diff --git a/{{cookiecutter.project_slug}}/backend/app/app/api/api_v1/endpoints/__init__.py b/backend/app/core/__init__.py
similarity index 100%
rename from {{cookiecutter.project_slug}}/backend/app/app/api/api_v1/endpoints/__init__.py
rename to backend/app/core/__init__.py
diff --git a/backend/app/core/config.py b/backend/app/core/config.py
new file mode 100644
index 0000000000..650b9f7910
--- /dev/null
+++ b/backend/app/core/config.py
@@ -0,0 +1,119 @@
+import secrets
+import warnings
+from typing import Annotated, Any, Literal
+
+from pydantic import (
+ AnyUrl,
+ BeforeValidator,
+ EmailStr,
+ HttpUrl,
+ PostgresDsn,
+ computed_field,
+ model_validator,
+)
+from pydantic_settings import BaseSettings, SettingsConfigDict
+from typing_extensions import Self
+
+
+def parse_cors(v: Any) -> list[str] | str:
+ if isinstance(v, str) and not v.startswith("["):
+ return [i.strip() for i in v.split(",") if i.strip()]
+ elif isinstance(v, list | str):
+ return v
+ raise ValueError(v)
+
+
+class Settings(BaseSettings):
+ model_config = SettingsConfigDict(
+ # Use top level .env file (one level above ./backend/)
+ env_file="../.env",
+ env_ignore_empty=True,
+ extra="ignore",
+ )
+ API_V1_STR: str = "/api/v1"
+ SECRET_KEY: str = secrets.token_urlsafe(32)
+ # 60 minutes * 24 hours * 8 days = 8 days
+ ACCESS_TOKEN_EXPIRE_MINUTES: int = 60 * 24 * 8
+ FRONTEND_HOST: str = "http://localhost:5173"
+ ENVIRONMENT: Literal["local", "staging", "production"] = "local"
+
+ BACKEND_CORS_ORIGINS: Annotated[
+ list[AnyUrl] | str, BeforeValidator(parse_cors)
+ ] = []
+
+ @computed_field # type: ignore[prop-decorator]
+ @property
+ def all_cors_origins(self) -> list[str]:
+ return [str(origin).rstrip("/") for origin in self.BACKEND_CORS_ORIGINS] + [
+ self.FRONTEND_HOST
+ ]
+
+ PROJECT_NAME: str
+ SENTRY_DSN: HttpUrl | None = None
+ POSTGRES_SERVER: str
+ POSTGRES_PORT: int = 5432
+ POSTGRES_USER: str
+ POSTGRES_PASSWORD: str = ""
+ POSTGRES_DB: str = ""
+
+ @computed_field # type: ignore[prop-decorator]
+ @property
+ def SQLALCHEMY_DATABASE_URI(self) -> PostgresDsn:
+ return PostgresDsn.build(
+ scheme="postgresql+psycopg",
+ username=self.POSTGRES_USER,
+ password=self.POSTGRES_PASSWORD,
+ host=self.POSTGRES_SERVER,
+ port=self.POSTGRES_PORT,
+ path=self.POSTGRES_DB,
+ )
+
+ SMTP_TLS: bool = True
+ SMTP_SSL: bool = False
+ SMTP_PORT: int = 587
+ SMTP_HOST: str | None = None
+ SMTP_USER: str | None = None
+ SMTP_PASSWORD: str | None = None
+ EMAILS_FROM_EMAIL: EmailStr | None = None
+ EMAILS_FROM_NAME: str | None = None
+
+ @model_validator(mode="after")
+ def _set_default_emails_from(self) -> Self:
+ if not self.EMAILS_FROM_NAME:
+ self.EMAILS_FROM_NAME = self.PROJECT_NAME
+ return self
+
+ EMAIL_RESET_TOKEN_EXPIRE_HOURS: int = 48
+
+ @computed_field # type: ignore[prop-decorator]
+ @property
+ def emails_enabled(self) -> bool:
+ return bool(self.SMTP_HOST and self.EMAILS_FROM_EMAIL)
+
+ EMAIL_TEST_USER: EmailStr = "test@example.com"
+ FIRST_SUPERUSER: EmailStr
+ FIRST_SUPERUSER_PASSWORD: str
+
+ def _check_default_secret(self, var_name: str, value: str | None) -> None:
+ if value == "changethis":
+ message = (
+ f'The value of {var_name} is "changethis", '
+ "for security, please change it, at least for deployments."
+ )
+ if self.ENVIRONMENT == "local":
+ warnings.warn(message, stacklevel=1)
+ else:
+ raise ValueError(message)
+
+ @model_validator(mode="after")
+ def _enforce_non_default_secrets(self) -> Self:
+ self._check_default_secret("SECRET_KEY", self.SECRET_KEY)
+ self._check_default_secret("POSTGRES_PASSWORD", self.POSTGRES_PASSWORD)
+ self._check_default_secret(
+ "FIRST_SUPERUSER_PASSWORD", self.FIRST_SUPERUSER_PASSWORD
+ )
+
+ return self
+
+
+settings = Settings() # type: ignore
diff --git a/backend/app/core/db.py b/backend/app/core/db.py
new file mode 100644
index 0000000000..ba991fb36d
--- /dev/null
+++ b/backend/app/core/db.py
@@ -0,0 +1,33 @@
+from sqlmodel import Session, create_engine, select
+
+from app import crud
+from app.core.config import settings
+from app.models import User, UserCreate
+
+engine = create_engine(str(settings.SQLALCHEMY_DATABASE_URI))
+
+
+# make sure all SQLModel models are imported (app.models) before initializing DB
+# otherwise, SQLModel might fail to initialize relationships properly
+# for more details: https://github.com/fastapi/full-stack-fastapi-template/issues/28
+
+
+def init_db(session: Session) -> None:
+ # Tables should be created with Alembic migrations
+ # But if you don't want to use migrations, create
+    # the tables by uncommenting the next lines
+ # from sqlmodel import SQLModel
+
+ # This works because the models are already imported and registered from app.models
+ # SQLModel.metadata.create_all(engine)
+
+ user = session.exec(
+ select(User).where(User.email == settings.FIRST_SUPERUSER)
+ ).first()
+ if not user:
+ user_in = UserCreate(
+ email=settings.FIRST_SUPERUSER,
+ password=settings.FIRST_SUPERUSER_PASSWORD,
+ is_superuser=True,
+ )
+ user = crud.create_user(session=session, user_create=user_in)
diff --git a/backend/app/core/security.py b/backend/app/core/security.py
new file mode 100644
index 0000000000..1e49ebc1fe
--- /dev/null
+++ b/backend/app/core/security.py
@@ -0,0 +1,36 @@
+from datetime import datetime, timedelta, timezone
+from typing import Any
+
+import jwt
+from pwdlib import PasswordHash
+from pwdlib.hashers.argon2 import Argon2Hasher
+from pwdlib.hashers.bcrypt import BcryptHasher
+
+from app.core.config import settings
+
+password_hash = PasswordHash(
+ (
+ Argon2Hasher(),
+ BcryptHasher(),
+ )
+)
+
+
+ALGORITHM = "HS256"
+
+
+def create_access_token(subject: str | Any, expires_delta: timedelta) -> str:
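+    # Illustrative call (caller-side names assumed, not defined in this file):
+    #   create_access_token(user.id, timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES))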
+ expire = datetime.now(timezone.utc) + expires_delta
+ to_encode = {"exp": expire, "sub": str(subject)}
+ encoded_jwt = jwt.encode(to_encode, settings.SECRET_KEY, algorithm=ALGORITHM)
+ return encoded_jwt
+
+
+def verify_password(
+ plain_password: str, hashed_password: str
+) -> tuple[bool, str | None]:
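+    # pwdlib's verify_and_update() returns (is_valid, new_hash); new_hash is non-None
+    # when the stored hash should be re-hashed, e.g. a legacy bcrypt hash that should
+    # be replaced by the preferred Argon2 hasher (see crud.authenticate).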
+ return password_hash.verify_and_update(plain_password, hashed_password)
+
+
+def get_password_hash(password: str) -> str:
+ return password_hash.hash(password)
diff --git a/backend/app/crud.py b/backend/app/crud.py
new file mode 100644
index 0000000000..a8ceba6444
--- /dev/null
+++ b/backend/app/crud.py
@@ -0,0 +1,68 @@
+import uuid
+from typing import Any
+
+from sqlmodel import Session, select
+
+from app.core.security import get_password_hash, verify_password
+from app.models import Item, ItemCreate, User, UserCreate, UserUpdate
+
+
+def create_user(*, session: Session, user_create: UserCreate) -> User:
+ db_obj = User.model_validate(
+ user_create, update={"hashed_password": get_password_hash(user_create.password)}
+ )
+ session.add(db_obj)
+ session.commit()
+ session.refresh(db_obj)
+ return db_obj
+
+
+def update_user(*, session: Session, db_user: User, user_in: UserUpdate) -> Any:
+ user_data = user_in.model_dump(exclude_unset=True)
+ extra_data = {}
+ if "password" in user_data:
+ password = user_data["password"]
+ hashed_password = get_password_hash(password)
+ extra_data["hashed_password"] = hashed_password
+ db_user.sqlmodel_update(user_data, update=extra_data)
+ session.add(db_user)
+ session.commit()
+ session.refresh(db_user)
+ return db_user
+
+
+def get_user_by_email(*, session: Session, email: str) -> User | None:
+ statement = select(User).where(User.email == email)
+ session_user = session.exec(statement).first()
+ return session_user
+
+
+# Dummy hash to use for timing attack prevention when user is not found
+# This is an Argon2 hash of a random password, used so the same verification work
+# runs even when no user is found
+DUMMY_HASH = "$argon2id$v=19$m=65536,t=3,p=4$MjQyZWE1MzBjYjJlZTI0Yw$YTU4NGM5ZTZmYjE2NzZlZjY0ZWY3ZGRkY2U2OWFjNjk"
+
+
+def authenticate(*, session: Session, email: str, password: str) -> User | None:
+ db_user = get_user_by_email(session=session, email=email)
+ if not db_user:
+ # Prevent timing attacks by running password verification even when user doesn't exist
+ # This ensures the response time is similar whether or not the email exists
+ verify_password(password, DUMMY_HASH)
+ return None
+ verified, updated_password_hash = verify_password(password, db_user.hashed_password)
+ if not verified:
+ return None
+ if updated_password_hash:
+ db_user.hashed_password = updated_password_hash
+ session.add(db_user)
+ session.commit()
+ session.refresh(db_user)
+ return db_user
+
+
+def create_item(*, session: Session, item_in: ItemCreate, owner_id: uuid.UUID) -> Item:
+ db_item = Item.model_validate(item_in, update={"owner_id": owner_id})
+ session.add(db_item)
+ session.commit()
+ session.refresh(db_item)
+ return db_item
diff --git a/backend/app/email-templates/build/new_account.html b/backend/app/email-templates/build/new_account.html
new file mode 100644
index 0000000000..344505033b
--- /dev/null
+++ b/backend/app/email-templates/build/new_account.html
@@ -0,0 +1,25 @@
+
\ No newline at end of file
diff --git a/backend/app/email-templates/build/reset_password.html b/backend/app/email-templates/build/reset_password.html
new file mode 100644
index 0000000000..4148a5b773
--- /dev/null
+++ b/backend/app/email-templates/build/reset_password.html
@@ -0,0 +1,25 @@
+
+{{ project_name }} - Password Recovery
+Hello {{ username }}
+We've received a request to reset your password. You can do it by clicking the button below:
+This password will expire in {{ valid_hours }} hours.
+If you didn't request a password recovery you can disregard this email.
\ No newline at end of file
diff --git a/backend/app/email-templates/build/test_email.html b/backend/app/email-templates/build/test_email.html
new file mode 100644
index 0000000000..04d0d85092
--- /dev/null
+++ b/backend/app/email-templates/build/test_email.html
@@ -0,0 +1,25 @@
+
+{{ project_name }}
+Test email for: {{ email }}
\ No newline at end of file
diff --git a/backend/app/email-templates/src/new_account.mjml b/backend/app/email-templates/src/new_account.mjml
new file mode 100644
index 0000000000..f41a3e3cf1
--- /dev/null
+++ b/backend/app/email-templates/src/new_account.mjml
@@ -0,0 +1,15 @@
+
+
+
+
+ {{ project_name }} - New Account
+ Welcome to your new account!
+ Here are your account details:
+ Username: {{ username }}
+ Password: {{ password }}
+ Go to Dashboard
+
+
+
+
+
diff --git a/backend/app/email-templates/src/reset_password.mjml b/backend/app/email-templates/src/reset_password.mjml
new file mode 100644
index 0000000000..743f5d77f4
--- /dev/null
+++ b/backend/app/email-templates/src/reset_password.mjml
@@ -0,0 +1,17 @@
+
+
+
+
+ {{ project_name }} - Password Recovery
+ Hello {{ username }}
+ We've received a request to reset your password. You can do it by clicking the button below:
+ Reset password
+ Or copy and paste the following link into your browser:
+ {{ link }}
+ This password will expire in {{ valid_hours }} hours.
+
+ If you didn't request a password recovery you can disregard this email.
+
+
+
+
diff --git a/backend/app/email-templates/src/test_email.mjml b/backend/app/email-templates/src/test_email.mjml
new file mode 100644
index 0000000000..45d58d6bac
--- /dev/null
+++ b/backend/app/email-templates/src/test_email.mjml
@@ -0,0 +1,11 @@
+
+
+
+
+ {{ project_name }}
+ Test email for: {{ email }}
+
+
+
+
+
diff --git a/{{cookiecutter.project_slug}}/backend/app/app/initial_data.py b/backend/app/initial_data.py
similarity index 67%
rename from {{cookiecutter.project_slug}}/backend/app/app/initial_data.py
rename to backend/app/initial_data.py
index c50646d2df..d806c3d381 100644
--- a/{{cookiecutter.project_slug}}/backend/app/app/initial_data.py
+++ b/backend/app/initial_data.py
@@ -1,15 +1,16 @@
import logging
-from app.db.init_db import init_db
-from app.db.session import SessionLocal
+from sqlmodel import Session
+
+from app.core.db import engine, init_db
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
def init() -> None:
- db = SessionLocal()
- init_db(db)
+ with Session(engine) as session:
+ init_db(session)
def main() -> None:
diff --git a/backend/app/main.py b/backend/app/main.py
new file mode 100644
index 0000000000..9a95801e74
--- /dev/null
+++ b/backend/app/main.py
@@ -0,0 +1,33 @@
+import sentry_sdk
+from fastapi import FastAPI
+from fastapi.routing import APIRoute
+from starlette.middleware.cors import CORSMiddleware
+
+from app.api.main import api_router
+from app.core.config import settings
+
+
+def custom_generate_unique_id(route: APIRoute) -> str:
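+    # Controls OpenAPI operation IDs (used by generated clients); e.g. a route
+    # tagged "items" and named "read_items" becomes "items-read_items".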
+ return f"{route.tags[0]}-{route.name}"
+
+
+if settings.SENTRY_DSN and settings.ENVIRONMENT != "local":
+ sentry_sdk.init(dsn=str(settings.SENTRY_DSN), enable_tracing=True)
+
+app = FastAPI(
+ title=settings.PROJECT_NAME,
+ openapi_url=f"{settings.API_V1_STR}/openapi.json",
+ generate_unique_id_function=custom_generate_unique_id,
+)
+
+# Set all CORS enabled origins
+if settings.all_cors_origins:
+ app.add_middleware(
+ CORSMiddleware,
+ allow_origins=settings.all_cors_origins,
+ allow_credentials=True,
+ allow_methods=["*"],
+ allow_headers=["*"],
+ )
+
+app.include_router(api_router, prefix=settings.API_V1_STR)
diff --git a/backend/app/models.py b/backend/app/models.py
new file mode 100644
index 0000000000..b5132e0e2c
--- /dev/null
+++ b/backend/app/models.py
@@ -0,0 +1,129 @@
+import uuid
+from datetime import datetime, timezone
+
+from pydantic import EmailStr
+from sqlalchemy import DateTime
+from sqlmodel import Field, Relationship, SQLModel
+
+
+def get_datetime_utc() -> datetime:
+ return datetime.now(timezone.utc)
+
+
+# Shared properties
+class UserBase(SQLModel):
+ email: EmailStr = Field(unique=True, index=True, max_length=255)
+ is_active: bool = True
+ is_superuser: bool = False
+ full_name: str | None = Field(default=None, max_length=255)
+
+
+# Properties to receive via API on creation
+class UserCreate(UserBase):
+ password: str = Field(min_length=8, max_length=128)
+
+
+class UserRegister(SQLModel):
+ email: EmailStr = Field(max_length=255)
+ password: str = Field(min_length=8, max_length=128)
+ full_name: str | None = Field(default=None, max_length=255)
+
+
+# Properties to receive via API on update, all are optional
+class UserUpdate(UserBase):
+ email: EmailStr | None = Field(default=None, max_length=255) # type: ignore
+ password: str | None = Field(default=None, min_length=8, max_length=128)
+
+
+class UserUpdateMe(SQLModel):
+ full_name: str | None = Field(default=None, max_length=255)
+ email: EmailStr | None = Field(default=None, max_length=255)
+
+
+class UpdatePassword(SQLModel):
+ current_password: str = Field(min_length=8, max_length=128)
+ new_password: str = Field(min_length=8, max_length=128)
+
+
+# Database model, database table inferred from class name
+class User(UserBase, table=True):
+ id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True)
+ hashed_password: str
+ created_at: datetime | None = Field(
+ default_factory=get_datetime_utc,
+ sa_type=DateTime(timezone=True), # type: ignore
+ )
+ items: list["Item"] = Relationship(back_populates="owner", cascade_delete=True)
+
+
+# Properties to return via API, id is always required
+class UserPublic(UserBase):
+ id: uuid.UUID
+ created_at: datetime | None = None
+
+
+class UsersPublic(SQLModel):
+ data: list[UserPublic]
+ count: int
+
+
+# Shared properties
+class ItemBase(SQLModel):
+ title: str = Field(min_length=1, max_length=255)
+ description: str | None = Field(default=None, max_length=255)
+
+
+# Properties to receive on item creation
+class ItemCreate(ItemBase):
+ pass
+
+
+# Properties to receive on item update
+class ItemUpdate(ItemBase):
+ title: str | None = Field(default=None, min_length=1, max_length=255) # type: ignore
+
+
+# Database model, database table inferred from class name
+class Item(ItemBase, table=True):
+ id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True)
+ created_at: datetime | None = Field(
+ default_factory=get_datetime_utc,
+ sa_type=DateTime(timezone=True), # type: ignore
+ )
+ owner_id: uuid.UUID = Field(
+ foreign_key="user.id", nullable=False, ondelete="CASCADE"
+ )
+ owner: User | None = Relationship(back_populates="items")
+
+
+# Properties to return via API, id is always required
+class ItemPublic(ItemBase):
+ id: uuid.UUID
+ owner_id: uuid.UUID
+ created_at: datetime | None = None
+
+
+class ItemsPublic(SQLModel):
+ data: list[ItemPublic]
+ count: int
+
+
+# Generic message
+class Message(SQLModel):
+ message: str
+
+
+# JSON payload containing access token
+class Token(SQLModel):
+ access_token: str
+ token_type: str = "bearer"
+
+
+# Contents of JWT token
+class TokenPayload(SQLModel):
+ sub: str | None = None
+
+
+class NewPassword(SQLModel):
+ token: str
+ new_password: str = Field(min_length=8, max_length=128)
diff --git a/{{cookiecutter.project_slug}}/backend/app/app/tests_pre_start.py b/backend/app/tests_pre_start.py
similarity index 74%
rename from {{cookiecutter.project_slug}}/backend/app/app/tests_pre_start.py
rename to backend/app/tests_pre_start.py
index 81de37134f..0ce6045635 100644
--- a/{{cookiecutter.project_slug}}/backend/app/app/tests_pre_start.py
+++ b/backend/app/tests_pre_start.py
@@ -1,8 +1,10 @@
import logging
+from sqlalchemy import Engine
+from sqlmodel import Session, select
from tenacity import after_log, before_log, retry, stop_after_attempt, wait_fixed
-from app.db.session import SessionLocal
+from app.core.db import engine
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
@@ -17,11 +19,11 @@
before=before_log(logger, logging.INFO),
after=after_log(logger, logging.WARN),
)
-def init() -> None:
+def init(db_engine: Engine) -> None:
try:
# Try to create session to check if DB is awake
- db = SessionLocal()
- db.execute("SELECT 1")
+ with Session(db_engine) as session:
+ session.exec(select(1))
except Exception as e:
logger.error(e)
raise e
@@ -29,7 +31,7 @@ def init() -> None:
def main() -> None:
logger.info("Initializing service")
- init()
+ init(engine)
logger.info("Service finished initializing")
diff --git a/backend/app/utils.py b/backend/app/utils.py
new file mode 100644
index 0000000000..ac029f6342
--- /dev/null
+++ b/backend/app/utils.py
@@ -0,0 +1,123 @@
+import logging
+from dataclasses import dataclass
+from datetime import datetime, timedelta, timezone
+from pathlib import Path
+from typing import Any
+
+import emails # type: ignore
+import jwt
+from jinja2 import Template
+from jwt.exceptions import InvalidTokenError
+
+from app.core import security
+from app.core.config import settings
+
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class EmailData:
+ html_content: str
+ subject: str
+
+
+def render_email_template(*, template_name: str, context: dict[str, Any]) -> str:
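+    # Reads a pre-built HTML template from email-templates/build/ and renders it
+    # with Jinja2 using the given context (see generate_test_email below for usage).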
+ template_str = (
+ Path(__file__).parent / "email-templates" / "build" / template_name
+ ).read_text()
+ html_content = Template(template_str).render(context)
+ return html_content
+
+
+def send_email(
+ *,
+ email_to: str,
+ subject: str = "",
+ html_content: str = "",
+) -> None:
+ assert settings.emails_enabled, "no provided configuration for email variables"
+ message = emails.Message(
+ subject=subject,
+ html=html_content,
+ mail_from=(settings.EMAILS_FROM_NAME, settings.EMAILS_FROM_EMAIL),
+ )
+ smtp_options = {"host": settings.SMTP_HOST, "port": settings.SMTP_PORT}
+ if settings.SMTP_TLS:
+ smtp_options["tls"] = True
+ elif settings.SMTP_SSL:
+ smtp_options["ssl"] = True
+ if settings.SMTP_USER:
+ smtp_options["user"] = settings.SMTP_USER
+ if settings.SMTP_PASSWORD:
+ smtp_options["password"] = settings.SMTP_PASSWORD
+ response = message.send(to=email_to, smtp=smtp_options)
+ logger.info(f"send email result: {response}")
+
+
+def generate_test_email(email_to: str) -> EmailData:
+ project_name = settings.PROJECT_NAME
+ subject = f"{project_name} - Test email"
+ html_content = render_email_template(
+ template_name="test_email.html",
+ context={"project_name": settings.PROJECT_NAME, "email": email_to},
+ )
+ return EmailData(html_content=html_content, subject=subject)
+
+
+def generate_reset_password_email(email_to: str, email: str, token: str) -> EmailData:
+ project_name = settings.PROJECT_NAME
+ subject = f"{project_name} - Password recovery for user {email}"
+ link = f"{settings.FRONTEND_HOST}/reset-password?token={token}"
+ html_content = render_email_template(
+ template_name="reset_password.html",
+ context={
+ "project_name": settings.PROJECT_NAME,
+ "username": email,
+ "email": email_to,
+ "valid_hours": settings.EMAIL_RESET_TOKEN_EXPIRE_HOURS,
+ "link": link,
+ },
+ )
+ return EmailData(html_content=html_content, subject=subject)
+
+
+def generate_new_account_email(
+ email_to: str, username: str, password: str
+) -> EmailData:
+ project_name = settings.PROJECT_NAME
+ subject = f"{project_name} - New account for user {username}"
+ html_content = render_email_template(
+ template_name="new_account.html",
+ context={
+ "project_name": settings.PROJECT_NAME,
+ "username": username,
+ "password": password,
+ "email": email_to,
+ "link": settings.FRONTEND_HOST,
+ },
+ )
+ return EmailData(html_content=html_content, subject=subject)
+
+
+def generate_password_reset_token(email: str) -> str:
+ delta = timedelta(hours=settings.EMAIL_RESET_TOKEN_EXPIRE_HOURS)
+ now = datetime.now(timezone.utc)
+ expires = now + delta
+ exp = expires.timestamp()
+ encoded_jwt = jwt.encode(
+ {"exp": exp, "nbf": now, "sub": email},
+ settings.SECRET_KEY,
+ algorithm=security.ALGORITHM,
+ )
+ return encoded_jwt
+
+
+def verify_password_reset_token(token: str) -> str | None:
+ try:
+ decoded_token = jwt.decode(
+ token, settings.SECRET_KEY, algorithms=[security.ALGORITHM]
+ )
+ return str(decoded_token["sub"])
+ except InvalidTokenError:
+ return None
diff --git a/backend/pyproject.toml b/backend/pyproject.toml
new file mode 100644
index 0000000000..3c876f11e0
--- /dev/null
+++ b/backend/pyproject.toml
@@ -0,0 +1,77 @@
+[project]
+name = "app"
+version = "0.1.0"
+description = ""
+requires-python = ">=3.10,<4.0"
+dependencies = [
+ "fastapi[standard]<1.0.0,>=0.114.2",
+ "python-multipart<1.0.0,>=0.0.7",
+ "email-validator<3.0.0.0,>=2.1.0.post1",
+ "tenacity<9.0.0,>=8.2.3",
+ "pydantic>2.0",
+ "emails<1.0,>=0.6",
+ "jinja2<4.0.0,>=3.1.4",
+ "alembic<2.0.0,>=1.12.1",
+ "httpx<1.0.0,>=0.25.1",
+ "psycopg[binary]<4.0.0,>=3.1.13",
+ "sqlmodel<1.0.0,>=0.0.21",
+ "pydantic-settings<3.0.0,>=2.2.1",
+ "sentry-sdk[fastapi]<2.0.0,>=1.40.6",
+ "pyjwt<3.0.0,>=2.8.0",
+ "pwdlib[argon2,bcrypt]>=0.3.0",
+]
+
+[dependency-groups]
+dev = [
+ "pytest<8.0.0,>=7.4.3",
+ "mypy<2.0.0,>=1.8.0",
+ "ruff<1.0.0,>=0.2.2",
+ "prek>=0.2.24,<1.0.0",
+ "coverage<8.0.0,>=7.4.3",
+]
+
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[tool.mypy]
+strict = true
+exclude = ["venv", ".venv", "alembic"]
+
+[tool.ruff]
+target-version = "py310"
+exclude = ["alembic"]
+
+[tool.ruff.lint]
+select = [
+ "E", # pycodestyle errors
+ "W", # pycodestyle warnings
+ "F", # pyflakes
+ "I", # isort
+ "B", # flake8-bugbear
+ "C4", # flake8-comprehensions
+ "UP", # pyupgrade
+ "ARG001", # unused arguments in functions
+ "T201", # print statements are not allowed
+]
+ignore = [
+ "E501", # line too long, handled by black
+ "B008", # do not perform function calls in argument defaults
+ "W191", # indentation contains tabs
+ "B904", # Allow raising exceptions without from e, for HTTPException
+]
+
+[tool.ruff.lint.pyupgrade]
+# Preserve types, even if a file imports `from __future__ import annotations`.
+keep-runtime-typing = true
+
+[tool.coverage.run]
+source = ["app"]
+dynamic_context = "test_function"
+
+[tool.coverage.report]
+show_missing = true
+sort = "-Cover"
+
+[tool.coverage.html]
+show_contexts = true
diff --git a/backend/scripts/format.sh b/backend/scripts/format.sh
new file mode 100755
index 0000000000..7be2f81205
--- /dev/null
+++ b/backend/scripts/format.sh
@@ -0,0 +1,5 @@
+#!/bin/sh -e
+set -x
+
+ruff check app scripts --fix
+ruff format app scripts
diff --git a/backend/scripts/lint.sh b/backend/scripts/lint.sh
new file mode 100644
index 0000000000..b3b2b4ecc7
--- /dev/null
+++ b/backend/scripts/lint.sh
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+
+set -e
+set -x
+
+mypy app
+ruff check app
+ruff format app --check
diff --git a/{{cookiecutter.project_slug}}/backend/app/prestart.sh b/backend/scripts/prestart.sh
similarity index 59%
rename from {{cookiecutter.project_slug}}/backend/app/prestart.sh
rename to backend/scripts/prestart.sh
index fc1e5f1890..1b395d513f 100644
--- a/{{cookiecutter.project_slug}}/backend/app/prestart.sh
+++ b/backend/scripts/prestart.sh
@@ -1,10 +1,13 @@
#! /usr/bin/env bash
+set -e
+set -x
+
# Let the DB start
-python /app/app/backend_pre_start.py
+python app/backend_pre_start.py
# Run migrations
alembic upgrade head
# Create initial data in DB
-python /app/app/initial_data.py
+python app/initial_data.py
diff --git a/backend/scripts/test.sh b/backend/scripts/test.sh
new file mode 100755
index 0000000000..38c3e8909a
--- /dev/null
+++ b/backend/scripts/test.sh
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+
+set -e
+set -x
+
+coverage run -m pytest tests/
+coverage report
+coverage html --title "${@-coverage}"
diff --git a/backend/scripts/tests-start.sh b/backend/scripts/tests-start.sh
new file mode 100644
index 0000000000..89dcb0da23
--- /dev/null
+++ b/backend/scripts/tests-start.sh
@@ -0,0 +1,7 @@
+#! /usr/bin/env bash
+set -e
+set -x
+
+python app/tests_pre_start.py
+
+bash scripts/test.sh "$@"
diff --git a/{{cookiecutter.project_slug}}/backend/app/app/core/__init__.py b/backend/tests/__init__.py
similarity index 100%
rename from {{cookiecutter.project_slug}}/backend/app/app/core/__init__.py
rename to backend/tests/__init__.py
diff --git a/{{cookiecutter.project_slug}}/backend/app/app/db/__init__.py b/backend/tests/api/__init__.py
similarity index 100%
rename from {{cookiecutter.project_slug}}/backend/app/app/db/__init__.py
rename to backend/tests/api/__init__.py
diff --git a/{{cookiecutter.project_slug}}/backend/app/app/tests/__init__.py b/backend/tests/api/routes/__init__.py
similarity index 100%
rename from {{cookiecutter.project_slug}}/backend/app/app/tests/__init__.py
rename to backend/tests/api/routes/__init__.py
diff --git a/backend/tests/api/routes/test_items.py b/backend/tests/api/routes/test_items.py
new file mode 100644
index 0000000000..3e82cd0134
--- /dev/null
+++ b/backend/tests/api/routes/test_items.py
@@ -0,0 +1,164 @@
+import uuid
+
+from fastapi.testclient import TestClient
+from sqlmodel import Session
+
+from app.core.config import settings
+from tests.utils.item import create_random_item
+
+
+def test_create_item(
+ client: TestClient, superuser_token_headers: dict[str, str]
+) -> None:
+ data = {"title": "Foo", "description": "Fighters"}
+ response = client.post(
+ f"{settings.API_V1_STR}/items/",
+ headers=superuser_token_headers,
+ json=data,
+ )
+ assert response.status_code == 200
+ content = response.json()
+ assert content["title"] == data["title"]
+ assert content["description"] == data["description"]
+ assert "id" in content
+ assert "owner_id" in content
+
+
+def test_read_item(
+ client: TestClient, superuser_token_headers: dict[str, str], db: Session
+) -> None:
+ item = create_random_item(db)
+ response = client.get(
+ f"{settings.API_V1_STR}/items/{item.id}",
+ headers=superuser_token_headers,
+ )
+ assert response.status_code == 200
+ content = response.json()
+ assert content["title"] == item.title
+ assert content["description"] == item.description
+ assert content["id"] == str(item.id)
+ assert content["owner_id"] == str(item.owner_id)
+
+
+def test_read_item_not_found(
+ client: TestClient, superuser_token_headers: dict[str, str]
+) -> None:
+ response = client.get(
+ f"{settings.API_V1_STR}/items/{uuid.uuid4()}",
+ headers=superuser_token_headers,
+ )
+ assert response.status_code == 404
+ content = response.json()
+ assert content["detail"] == "Item not found"
+
+
+def test_read_item_not_enough_permissions(
+ client: TestClient, normal_user_token_headers: dict[str, str], db: Session
+) -> None:
+ item = create_random_item(db)
+ response = client.get(
+ f"{settings.API_V1_STR}/items/{item.id}",
+ headers=normal_user_token_headers,
+ )
+ assert response.status_code == 403
+ content = response.json()
+ assert content["detail"] == "Not enough permissions"
+
+
+def test_read_items(
+ client: TestClient, superuser_token_headers: dict[str, str], db: Session
+) -> None:
+ create_random_item(db)
+ create_random_item(db)
+ response = client.get(
+ f"{settings.API_V1_STR}/items/",
+ headers=superuser_token_headers,
+ )
+ assert response.status_code == 200
+ content = response.json()
+ assert len(content["data"]) >= 2
+
+
+def test_update_item(
+ client: TestClient, superuser_token_headers: dict[str, str], db: Session
+) -> None:
+ item = create_random_item(db)
+ data = {"title": "Updated title", "description": "Updated description"}
+ response = client.put(
+ f"{settings.API_V1_STR}/items/{item.id}",
+ headers=superuser_token_headers,
+ json=data,
+ )
+ assert response.status_code == 200
+ content = response.json()
+ assert content["title"] == data["title"]
+ assert content["description"] == data["description"]
+ assert content["id"] == str(item.id)
+ assert content["owner_id"] == str(item.owner_id)
+
+
+def test_update_item_not_found(
+ client: TestClient, superuser_token_headers: dict[str, str]
+) -> None:
+ data = {"title": "Updated title", "description": "Updated description"}
+ response = client.put(
+ f"{settings.API_V1_STR}/items/{uuid.uuid4()}",
+ headers=superuser_token_headers,
+ json=data,
+ )
+ assert response.status_code == 404
+ content = response.json()
+ assert content["detail"] == "Item not found"
+
+
+def test_update_item_not_enough_permissions(
+ client: TestClient, normal_user_token_headers: dict[str, str], db: Session
+) -> None:
+ item = create_random_item(db)
+ data = {"title": "Updated title", "description": "Updated description"}
+ response = client.put(
+ f"{settings.API_V1_STR}/items/{item.id}",
+ headers=normal_user_token_headers,
+ json=data,
+ )
+ assert response.status_code == 403
+ content = response.json()
+ assert content["detail"] == "Not enough permissions"
+
+
+def test_delete_item(
+ client: TestClient, superuser_token_headers: dict[str, str], db: Session
+) -> None:
+ item = create_random_item(db)
+ response = client.delete(
+ f"{settings.API_V1_STR}/items/{item.id}",
+ headers=superuser_token_headers,
+ )
+ assert response.status_code == 200
+ content = response.json()
+ assert content["message"] == "Item deleted successfully"
+
+
+def test_delete_item_not_found(
+ client: TestClient, superuser_token_headers: dict[str, str]
+) -> None:
+ response = client.delete(
+ f"{settings.API_V1_STR}/items/{uuid.uuid4()}",
+ headers=superuser_token_headers,
+ )
+ assert response.status_code == 404
+ content = response.json()
+ assert content["detail"] == "Item not found"
+
+
+def test_delete_item_not_enough_permissions(
+ client: TestClient, normal_user_token_headers: dict[str, str], db: Session
+) -> None:
+ item = create_random_item(db)
+ response = client.delete(
+ f"{settings.API_V1_STR}/items/{item.id}",
+ headers=normal_user_token_headers,
+ )
+ assert response.status_code == 403
+ content = response.json()
+ assert content["detail"] == "Not enough permissions"
diff --git a/backend/tests/api/routes/test_login.py b/backend/tests/api/routes/test_login.py
new file mode 100644
index 0000000000..96677a25f6
--- /dev/null
+++ b/backend/tests/api/routes/test_login.py
@@ -0,0 +1,191 @@
+from unittest.mock import patch
+
+from fastapi.testclient import TestClient
+from pwdlib.hashers.bcrypt import BcryptHasher
+from sqlmodel import Session
+
+from app.core.config import settings
+from app.core.security import get_password_hash, verify_password
+from app.crud import create_user
+from app.models import User, UserCreate
+from app.utils import generate_password_reset_token
+from tests.utils.user import user_authentication_headers
+from tests.utils.utils import random_email, random_lower_string
+
+
+def test_get_access_token(client: TestClient) -> None:
+ login_data = {
+ "username": settings.FIRST_SUPERUSER,
+ "password": settings.FIRST_SUPERUSER_PASSWORD,
+ }
+ r = client.post(f"{settings.API_V1_STR}/login/access-token", data=login_data)
+ tokens = r.json()
+ assert r.status_code == 200
+ assert "access_token" in tokens
+ assert tokens["access_token"]
+
+
+def test_get_access_token_incorrect_password(client: TestClient) -> None:
+ login_data = {
+ "username": settings.FIRST_SUPERUSER,
+ "password": "incorrect",
+ }
+ r = client.post(f"{settings.API_V1_STR}/login/access-token", data=login_data)
+ assert r.status_code == 400
+
+
+def test_use_access_token(
+ client: TestClient, superuser_token_headers: dict[str, str]
+) -> None:
+ r = client.post(
+ f"{settings.API_V1_STR}/login/test-token",
+ headers=superuser_token_headers,
+ )
+ result = r.json()
+ assert r.status_code == 200
+ assert "email" in result
+
+
+def test_recovery_password(
+ client: TestClient, normal_user_token_headers: dict[str, str]
+) -> None:
+ with (
+ patch("app.core.config.settings.SMTP_HOST", "smtp.example.com"),
+ patch("app.core.config.settings.SMTP_USER", "admin@example.com"),
+ ):
+ email = "test@example.com"
+ r = client.post(
+ f"{settings.API_V1_STR}/password-recovery/{email}",
+ headers=normal_user_token_headers,
+ )
+ assert r.status_code == 200
+ assert r.json() == {
+ "message": "If that email is registered, we sent a password recovery link"
+ }
+
+
+def test_recovery_password_user_not_exists(
+ client: TestClient, normal_user_token_headers: dict[str, str]
+) -> None:
+ email = "jVgQr@example.com"
+ r = client.post(
+ f"{settings.API_V1_STR}/password-recovery/{email}",
+ headers=normal_user_token_headers,
+ )
+ # Should return 200 with generic message to prevent email enumeration attacks
+ assert r.status_code == 200
+ assert r.json() == {
+ "message": "If that email is registered, we sent a password recovery link"
+ }
+
+
+def test_reset_password(client: TestClient, db: Session) -> None:
+ email = random_email()
+ password = random_lower_string()
+ new_password = random_lower_string()
+
+ user_create = UserCreate(
+ email=email,
+ full_name="Test User",
+ password=password,
+ is_active=True,
+ is_superuser=False,
+ )
+ user = create_user(session=db, user_create=user_create)
+ token = generate_password_reset_token(email=email)
+ headers = user_authentication_headers(client=client, email=email, password=password)
+ data = {"new_password": new_password, "token": token}
+
+ r = client.post(
+ f"{settings.API_V1_STR}/reset-password/",
+ headers=headers,
+ json=data,
+ )
+
+ assert r.status_code == 200
+ assert r.json() == {"message": "Password updated successfully"}
+
+ db.refresh(user)
+ verified, _ = verify_password(new_password, user.hashed_password)
+ assert verified
+
+
+def test_reset_password_invalid_token(
+ client: TestClient, superuser_token_headers: dict[str, str]
+) -> None:
+ data = {"new_password": "changethis", "token": "invalid"}
+ r = client.post(
+ f"{settings.API_V1_STR}/reset-password/",
+ headers=superuser_token_headers,
+ json=data,
+ )
+ response = r.json()
+
+ assert "detail" in response
+ assert r.status_code == 400
+ assert response["detail"] == "Invalid token"
+
+
+def test_login_with_bcrypt_password_upgrades_to_argon2(
+ client: TestClient, db: Session
+) -> None:
+ """Test that logging in with a bcrypt password hash upgrades it to argon2."""
+ email = random_email()
+ password = random_lower_string()
+
+ # Create a bcrypt hash directly (simulating legacy password)
+ bcrypt_hasher = BcryptHasher()
+ bcrypt_hash = bcrypt_hasher.hash(password)
+ assert bcrypt_hash.startswith("$2") # bcrypt hashes start with $2
+
+ user = User(email=email, hashed_password=bcrypt_hash, is_active=True)
+ db.add(user)
+ db.commit()
+ db.refresh(user)
+
+ assert user.hashed_password.startswith("$2")
+
+ login_data = {"username": email, "password": password}
+ r = client.post(f"{settings.API_V1_STR}/login/access-token", data=login_data)
+ assert r.status_code == 200
+ tokens = r.json()
+ assert "access_token" in tokens
+
+ db.refresh(user)
+
+ # Verify the hash was upgraded to argon2
+ assert user.hashed_password.startswith("$argon2")
+
+ verified, updated_hash = verify_password(password, user.hashed_password)
+ assert verified
+ # Should not need another update since it's already argon2
+ assert updated_hash is None
+
+
+def test_login_with_argon2_password_keeps_hash(client: TestClient, db: Session) -> None:
+ """Test that logging in with an argon2 password hash does not update it."""
+ email = random_email()
+ password = random_lower_string()
+
+ # Create an argon2 hash (current default)
+ argon2_hash = get_password_hash(password)
+ assert argon2_hash.startswith("$argon2")
+
+ # Create user with argon2 hash
+ user = User(email=email, hashed_password=argon2_hash, is_active=True)
+ db.add(user)
+ db.commit()
+ db.refresh(user)
+
+ original_hash = user.hashed_password
+
+ login_data = {"username": email, "password": password}
+ r = client.post(f"{settings.API_V1_STR}/login/access-token", data=login_data)
+ assert r.status_code == 200
+ tokens = r.json()
+ assert "access_token" in tokens
+
+ db.refresh(user)
+
+ assert user.hashed_password == original_hash
+ assert user.hashed_password.startswith("$argon2")
diff --git a/backend/tests/api/routes/test_private.py b/backend/tests/api/routes/test_private.py
new file mode 100644
index 0000000000..1e1f985021
--- /dev/null
+++ b/backend/tests/api/routes/test_private.py
@@ -0,0 +1,26 @@
+from fastapi.testclient import TestClient
+from sqlmodel import Session, select
+
+from app.core.config import settings
+from app.models import User
+
+
+def test_create_user(client: TestClient, db: Session) -> None:
+ r = client.post(
+ f"{settings.API_V1_STR}/private/users/",
+ json={
+ "email": "pollo@listo.com",
+ "password": "password123",
+ "full_name": "Pollo Listo",
+ },
+ )
+
+ assert r.status_code == 200
+
+ data = r.json()
+
+ user = db.exec(select(User).where(User.id == data["id"])).first()
+
+ assert user
+ assert user.email == "pollo@listo.com"
+ assert user.full_name == "Pollo Listo"
diff --git a/backend/tests/api/routes/test_users.py b/backend/tests/api/routes/test_users.py
new file mode 100644
index 0000000000..9c4cdd5991
--- /dev/null
+++ b/backend/tests/api/routes/test_users.py
@@ -0,0 +1,521 @@
+import uuid
+from unittest.mock import patch
+
+from fastapi.testclient import TestClient
+from sqlmodel import Session, select
+
+from app import crud
+from app.core.config import settings
+from app.core.security import verify_password
+from app.models import User, UserCreate
+from tests.utils.user import create_random_user
+from tests.utils.utils import random_email, random_lower_string
+
+
+def test_get_users_superuser_me(
+ client: TestClient, superuser_token_headers: dict[str, str]
+) -> None:
+ r = client.get(f"{settings.API_V1_STR}/users/me", headers=superuser_token_headers)
+ current_user = r.json()
+ assert current_user
+ assert current_user["is_active"] is True
+ assert current_user["is_superuser"]
+ assert current_user["email"] == settings.FIRST_SUPERUSER
+
+
+def test_get_users_normal_user_me(
+ client: TestClient, normal_user_token_headers: dict[str, str]
+) -> None:
+ r = client.get(f"{settings.API_V1_STR}/users/me", headers=normal_user_token_headers)
+ current_user = r.json()
+ assert current_user
+ assert current_user["is_active"] is True
+ assert current_user["is_superuser"] is False
+ assert current_user["email"] == settings.EMAIL_TEST_USER
+
+
+def test_create_user_new_email(
+ client: TestClient, superuser_token_headers: dict[str, str], db: Session
+) -> None:
+ with (
+ patch("app.utils.send_email", return_value=None),
+ patch("app.core.config.settings.SMTP_HOST", "smtp.example.com"),
+ patch("app.core.config.settings.SMTP_USER", "admin@example.com"),
+ ):
+ username = random_email()
+ password = random_lower_string()
+ data = {"email": username, "password": password}
+ r = client.post(
+ f"{settings.API_V1_STR}/users/",
+ headers=superuser_token_headers,
+ json=data,
+ )
+ assert 200 <= r.status_code < 300
+ created_user = r.json()
+ user = crud.get_user_by_email(session=db, email=username)
+ assert user
+ assert user.email == created_user["email"]
+
+
+def test_get_existing_user_as_superuser(
+ client: TestClient, superuser_token_headers: dict[str, str], db: Session
+) -> None:
+ username = random_email()
+ password = random_lower_string()
+ user_in = UserCreate(email=username, password=password)
+ user = crud.create_user(session=db, user_create=user_in)
+ user_id = user.id
+ r = client.get(
+ f"{settings.API_V1_STR}/users/{user_id}",
+ headers=superuser_token_headers,
+ )
+ assert 200 <= r.status_code < 300
+ api_user = r.json()
+ existing_user = crud.get_user_by_email(session=db, email=username)
+ assert existing_user
+ assert existing_user.email == api_user["email"]
+
+
+def test_get_non_existing_user_as_superuser(
+ client: TestClient, superuser_token_headers: dict[str, str]
+) -> None:
+ r = client.get(
+ f"{settings.API_V1_STR}/users/{uuid.uuid4()}",
+ headers=superuser_token_headers,
+ )
+ assert r.status_code == 404
+ assert r.json() == {"detail": "User not found"}
+
+
+def test_get_existing_user_current_user(client: TestClient, db: Session) -> None:
+ username = random_email()
+ password = random_lower_string()
+ user_in = UserCreate(email=username, password=password)
+ user = crud.create_user(session=db, user_create=user_in)
+ user_id = user.id
+
+ login_data = {
+ "username": username,
+ "password": password,
+ }
+ r = client.post(f"{settings.API_V1_STR}/login/access-token", data=login_data)
+ tokens = r.json()
+ a_token = tokens["access_token"]
+ headers = {"Authorization": f"Bearer {a_token}"}
+
+ r = client.get(
+ f"{settings.API_V1_STR}/users/{user_id}",
+ headers=headers,
+ )
+ assert 200 <= r.status_code < 300
+ api_user = r.json()
+ existing_user = crud.get_user_by_email(session=db, email=username)
+ assert existing_user
+ assert existing_user.email == api_user["email"]
+
+
+def test_get_existing_user_permissions_error(
+ db: Session,
+ client: TestClient,
+ normal_user_token_headers: dict[str, str],
+) -> None:
+ user = create_random_user(db)
+
+ r = client.get(
+ f"{settings.API_V1_STR}/users/{user.id}",
+ headers=normal_user_token_headers,
+ )
+ assert r.status_code == 403
+ assert r.json() == {"detail": "The user doesn't have enough privileges"}
+
+
+def test_get_non_existing_user_permissions_error(
+ client: TestClient,
+ normal_user_token_headers: dict[str, str],
+) -> None:
+ user_id = uuid.uuid4()
+
+ r = client.get(
+ f"{settings.API_V1_STR}/users/{user_id}",
+ headers=normal_user_token_headers,
+ )
+ assert r.status_code == 403
+ assert r.json() == {"detail": "The user doesn't have enough privileges"}
+
+
+def test_create_user_existing_username(
+ client: TestClient, superuser_token_headers: dict[str, str], db: Session
+) -> None:
+ username = random_email()
+ # username = email
+ password = random_lower_string()
+ user_in = UserCreate(email=username, password=password)
+ crud.create_user(session=db, user_create=user_in)
+ data = {"email": username, "password": password}
+ r = client.post(
+ f"{settings.API_V1_STR}/users/",
+ headers=superuser_token_headers,
+ json=data,
+ )
+ created_user = r.json()
+ assert r.status_code == 400
+ assert "_id" not in created_user
+
+
+def test_create_user_by_normal_user(
+ client: TestClient, normal_user_token_headers: dict[str, str]
+) -> None:
+ username = random_email()
+ password = random_lower_string()
+ data = {"email": username, "password": password}
+ r = client.post(
+ f"{settings.API_V1_STR}/users/",
+ headers=normal_user_token_headers,
+ json=data,
+ )
+ assert r.status_code == 403
+
+
+def test_retrieve_users(
+ client: TestClient, superuser_token_headers: dict[str, str], db: Session
+) -> None:
+ username = random_email()
+ password = random_lower_string()
+ user_in = UserCreate(email=username, password=password)
+ crud.create_user(session=db, user_create=user_in)
+
+ username2 = random_email()
+ password2 = random_lower_string()
+ user_in2 = UserCreate(email=username2, password=password2)
+ crud.create_user(session=db, user_create=user_in2)
+
+ r = client.get(f"{settings.API_V1_STR}/users/", headers=superuser_token_headers)
+ all_users = r.json()
+
+ assert len(all_users["data"]) > 1
+ assert "count" in all_users
+ for item in all_users["data"]:
+ assert "email" in item
+
+
+def test_update_user_me(
+ client: TestClient, normal_user_token_headers: dict[str, str], db: Session
+) -> None:
+ full_name = "Updated Name"
+ email = random_email()
+ data = {"full_name": full_name, "email": email}
+ r = client.patch(
+ f"{settings.API_V1_STR}/users/me",
+ headers=normal_user_token_headers,
+ json=data,
+ )
+ assert r.status_code == 200
+ updated_user = r.json()
+ assert updated_user["email"] == email
+ assert updated_user["full_name"] == full_name
+
+ user_query = select(User).where(User.email == email)
+ user_db = db.exec(user_query).first()
+ assert user_db
+ assert user_db.email == email
+ assert user_db.full_name == full_name
+
+
+def test_update_password_me(
+ client: TestClient, superuser_token_headers: dict[str, str], db: Session
+) -> None:
+ new_password = random_lower_string()
+ data = {
+ "current_password": settings.FIRST_SUPERUSER_PASSWORD,
+ "new_password": new_password,
+ }
+ r = client.patch(
+ f"{settings.API_V1_STR}/users/me/password",
+ headers=superuser_token_headers,
+ json=data,
+ )
+ assert r.status_code == 200
+ updated_user = r.json()
+ assert updated_user["message"] == "Password updated successfully"
+
+ user_query = select(User).where(User.email == settings.FIRST_SUPERUSER)
+ user_db = db.exec(user_query).first()
+ assert user_db
+ assert user_db.email == settings.FIRST_SUPERUSER
+ verified, _ = verify_password(new_password, user_db.hashed_password)
+ assert verified
+
+ # Revert to the old password to keep consistency in test
+ old_data = {
+ "current_password": new_password,
+ "new_password": settings.FIRST_SUPERUSER_PASSWORD,
+ }
+ r = client.patch(
+ f"{settings.API_V1_STR}/users/me/password",
+ headers=superuser_token_headers,
+ json=old_data,
+ )
+ db.refresh(user_db)
+
+ assert r.status_code == 200
+ verified, _ = verify_password(
+ settings.FIRST_SUPERUSER_PASSWORD, user_db.hashed_password
+ )
+ assert verified
+
+
+def test_update_password_me_incorrect_password(
+ client: TestClient, superuser_token_headers: dict[str, str]
+) -> None:
+ new_password = random_lower_string()
+ data = {"current_password": new_password, "new_password": new_password}
+ r = client.patch(
+ f"{settings.API_V1_STR}/users/me/password",
+ headers=superuser_token_headers,
+ json=data,
+ )
+ assert r.status_code == 400
+ updated_user = r.json()
+ assert updated_user["detail"] == "Incorrect password"
+
+
+def test_update_user_me_email_exists(
+ client: TestClient, normal_user_token_headers: dict[str, str], db: Session
+) -> None:
+ username = random_email()
+ password = random_lower_string()
+ user_in = UserCreate(email=username, password=password)
+ user = crud.create_user(session=db, user_create=user_in)
+
+ data = {"email": user.email}
+ r = client.patch(
+ f"{settings.API_V1_STR}/users/me",
+ headers=normal_user_token_headers,
+ json=data,
+ )
+ assert r.status_code == 409
+ assert r.json()["detail"] == "User with this email already exists"
+
+
+def test_update_password_me_same_password_error(
+ client: TestClient, superuser_token_headers: dict[str, str]
+) -> None:
+ data = {
+ "current_password": settings.FIRST_SUPERUSER_PASSWORD,
+ "new_password": settings.FIRST_SUPERUSER_PASSWORD,
+ }
+ r = client.patch(
+ f"{settings.API_V1_STR}/users/me/password",
+ headers=superuser_token_headers,
+ json=data,
+ )
+ assert r.status_code == 400
+ updated_user = r.json()
+ assert (
+ updated_user["detail"] == "New password cannot be the same as the current one"
+ )
+
+
+def test_register_user(client: TestClient, db: Session) -> None:
+ username = random_email()
+ password = random_lower_string()
+ full_name = random_lower_string()
+ data = {"email": username, "password": password, "full_name": full_name}
+ r = client.post(
+ f"{settings.API_V1_STR}/users/signup",
+ json=data,
+ )
+ assert r.status_code == 200
+ created_user = r.json()
+ assert created_user["email"] == username
+ assert created_user["full_name"] == full_name
+
+ user_query = select(User).where(User.email == username)
+ user_db = db.exec(user_query).first()
+ assert user_db
+ assert user_db.email == username
+ assert user_db.full_name == full_name
+ verified, _ = verify_password(password, user_db.hashed_password)
+ assert verified
+
+
+def test_register_user_already_exists_error(client: TestClient) -> None:
+ password = random_lower_string()
+ full_name = random_lower_string()
+ data = {
+ "email": settings.FIRST_SUPERUSER,
+ "password": password,
+ "full_name": full_name,
+ }
+ r = client.post(
+ f"{settings.API_V1_STR}/users/signup",
+ json=data,
+ )
+ assert r.status_code == 400
+ assert r.json()["detail"] == "The user with this email already exists in the system"
+
+
+def test_update_user(
+ client: TestClient, superuser_token_headers: dict[str, str], db: Session
+) -> None:
+ username = random_email()
+ password = random_lower_string()
+ user_in = UserCreate(email=username, password=password)
+ user = crud.create_user(session=db, user_create=user_in)
+
+ data = {"full_name": "Updated_full_name"}
+ r = client.patch(
+ f"{settings.API_V1_STR}/users/{user.id}",
+ headers=superuser_token_headers,
+ json=data,
+ )
+ assert r.status_code == 200
+ updated_user = r.json()
+
+ assert updated_user["full_name"] == "Updated_full_name"
+
+ user_query = select(User).where(User.email == username)
+ user_db = db.exec(user_query).first()
+    assert user_db
+    db.refresh(user_db)
+ assert user_db.full_name == "Updated_full_name"
+
+
+def test_update_user_not_exists(
+ client: TestClient, superuser_token_headers: dict[str, str]
+) -> None:
+ data = {"full_name": "Updated_full_name"}
+ r = client.patch(
+ f"{settings.API_V1_STR}/users/{uuid.uuid4()}",
+ headers=superuser_token_headers,
+ json=data,
+ )
+ assert r.status_code == 404
+ assert r.json()["detail"] == "The user with this id does not exist in the system"
+
+
+def test_update_user_email_exists(
+ client: TestClient, superuser_token_headers: dict[str, str], db: Session
+) -> None:
+ username = random_email()
+ password = random_lower_string()
+ user_in = UserCreate(email=username, password=password)
+ user = crud.create_user(session=db, user_create=user_in)
+
+ username2 = random_email()
+ password2 = random_lower_string()
+ user_in2 = UserCreate(email=username2, password=password2)
+ user2 = crud.create_user(session=db, user_create=user_in2)
+
+ data = {"email": user2.email}
+ r = client.patch(
+ f"{settings.API_V1_STR}/users/{user.id}",
+ headers=superuser_token_headers,
+ json=data,
+ )
+ assert r.status_code == 409
+ assert r.json()["detail"] == "User with this email already exists"
+
+
+def test_delete_user_me(client: TestClient, db: Session) -> None:
+ username = random_email()
+ password = random_lower_string()
+ user_in = UserCreate(email=username, password=password)
+ user = crud.create_user(session=db, user_create=user_in)
+ user_id = user.id
+
+ login_data = {
+ "username": username,
+ "password": password,
+ }
+ r = client.post(f"{settings.API_V1_STR}/login/access-token", data=login_data)
+ tokens = r.json()
+ a_token = tokens["access_token"]
+ headers = {"Authorization": f"Bearer {a_token}"}
+
+ r = client.delete(
+ f"{settings.API_V1_STR}/users/me",
+ headers=headers,
+ )
+ assert r.status_code == 200
+ deleted_user = r.json()
+ assert deleted_user["message"] == "User deleted successfully"
+    result = db.exec(select(User).where(User.id == user_id)).first()
+    assert result is None
+
+
+def test_delete_user_me_as_superuser(
+ client: TestClient, superuser_token_headers: dict[str, str]
+) -> None:
+ r = client.delete(
+ f"{settings.API_V1_STR}/users/me",
+ headers=superuser_token_headers,
+ )
+ assert r.status_code == 403
+ response = r.json()
+ assert response["detail"] == "Super users are not allowed to delete themselves"
+
+
+def test_delete_user_super_user(
+ client: TestClient, superuser_token_headers: dict[str, str], db: Session
+) -> None:
+ username = random_email()
+ password = random_lower_string()
+ user_in = UserCreate(email=username, password=password)
+ user = crud.create_user(session=db, user_create=user_in)
+ user_id = user.id
+ r = client.delete(
+ f"{settings.API_V1_STR}/users/{user_id}",
+ headers=superuser_token_headers,
+ )
+ assert r.status_code == 200
+ deleted_user = r.json()
+ assert deleted_user["message"] == "User deleted successfully"
+ result = db.exec(select(User).where(User.id == user_id)).first()
+ assert result is None
+
+
+def test_delete_user_not_found(
+ client: TestClient, superuser_token_headers: dict[str, str]
+) -> None:
+ r = client.delete(
+ f"{settings.API_V1_STR}/users/{uuid.uuid4()}",
+ headers=superuser_token_headers,
+ )
+ assert r.status_code == 404
+ assert r.json()["detail"] == "User not found"
+
+
+def test_delete_user_current_super_user_error(
+ client: TestClient, superuser_token_headers: dict[str, str], db: Session
+) -> None:
+ super_user = crud.get_user_by_email(session=db, email=settings.FIRST_SUPERUSER)
+ assert super_user
+ user_id = super_user.id
+
+ r = client.delete(
+ f"{settings.API_V1_STR}/users/{user_id}",
+ headers=superuser_token_headers,
+ )
+ assert r.status_code == 403
+ assert r.json()["detail"] == "Super users are not allowed to delete themselves"
+
+
+def test_delete_user_without_privileges(
+ client: TestClient, normal_user_token_headers: dict[str, str], db: Session
+) -> None:
+ username = random_email()
+ password = random_lower_string()
+ user_in = UserCreate(email=username, password=password)
+ user = crud.create_user(session=db, user_create=user_in)
+
+ r = client.delete(
+ f"{settings.API_V1_STR}/users/{user.id}",
+ headers=normal_user_token_headers,
+ )
+ assert r.status_code == 403
+ assert r.json()["detail"] == "The user doesn't have enough privileges"
diff --git a/backend/tests/conftest.py b/backend/tests/conftest.py
new file mode 100644
index 0000000000..8ddab7b321
--- /dev/null
+++ b/backend/tests/conftest.py
@@ -0,0 +1,42 @@
+from collections.abc import Generator
+
+import pytest
+from fastapi.testclient import TestClient
+from sqlmodel import Session, delete
+
+from app.core.config import settings
+from app.core.db import engine, init_db
+from app.main import app
+from app.models import Item, User
+from tests.utils.user import authentication_token_from_email
+from tests.utils.utils import get_superuser_token_headers
+
+
+@pytest.fixture(scope="session", autouse=True)
+def db() -> Generator[Session, None, None]:
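+    # Session-scoped fixture shared across the whole test run; all Item and User
+    # rows are deleted at teardown by the statements after `yield`.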
+ with Session(engine) as session:
+ init_db(session)
+ yield session
+ statement = delete(Item)
+ session.execute(statement)
+ statement = delete(User)
+ session.execute(statement)
+ session.commit()
+
+
+@pytest.fixture(scope="module")
+def client() -> Generator[TestClient, None, None]:
+ with TestClient(app) as c:
+ yield c
+
+
+@pytest.fixture(scope="module")
+def superuser_token_headers(client: TestClient) -> dict[str, str]:
+ return get_superuser_token_headers(client)
+
+
+@pytest.fixture(scope="module")
+def normal_user_token_headers(client: TestClient, db: Session) -> dict[str, str]:
+ return authentication_token_from_email(
+ client=client, email=settings.EMAIL_TEST_USER, db=db
+ )
diff --git a/{{cookiecutter.project_slug}}/backend/app/app/tests/api/__init__.py b/backend/tests/crud/__init__.py
similarity index 100%
rename from {{cookiecutter.project_slug}}/backend/app/app/tests/api/__init__.py
rename to backend/tests/crud/__init__.py
diff --git a/backend/tests/crud/test_user.py b/backend/tests/crud/test_user.py
new file mode 100644
index 0000000000..3db77ef624
--- /dev/null
+++ b/backend/tests/crud/test_user.py
@@ -0,0 +1,130 @@
+from fastapi.encoders import jsonable_encoder
+from pwdlib.hashers.bcrypt import BcryptHasher
+from sqlmodel import Session
+
+from app import crud
+from app.core.security import verify_password
+from app.models import User, UserCreate, UserUpdate
+from tests.utils.utils import random_email, random_lower_string
+
+
+def test_create_user(db: Session) -> None:
+ email = random_email()
+ password = random_lower_string()
+ user_in = UserCreate(email=email, password=password)
+ user = crud.create_user(session=db, user_create=user_in)
+ assert user.email == email
+ assert hasattr(user, "hashed_password")
+
+
+def test_authenticate_user(db: Session) -> None:
+ email = random_email()
+ password = random_lower_string()
+ user_in = UserCreate(email=email, password=password)
+ user = crud.create_user(session=db, user_create=user_in)
+ authenticated_user = crud.authenticate(session=db, email=email, password=password)
+ assert authenticated_user
+ assert user.email == authenticated_user.email
+
+
+def test_not_authenticate_user(db: Session) -> None:
+ email = random_email()
+ password = random_lower_string()
+ user = crud.authenticate(session=db, email=email, password=password)
+ assert user is None
+
+
+def test_check_if_user_is_active(db: Session) -> None:
+ email = random_email()
+ password = random_lower_string()
+ user_in = UserCreate(email=email, password=password)
+ user = crud.create_user(session=db, user_create=user_in)
+ assert user.is_active is True
+
+
+def test_check_if_user_is_active_inactive(db: Session) -> None:
+ email = random_email()
+ password = random_lower_string()
+ user_in = UserCreate(email=email, password=password, is_active=False)
+ user = crud.create_user(session=db, user_create=user_in)
+ assert user.is_active is False
+
+
+def test_check_if_user_is_superuser(db: Session) -> None:
+ email = random_email()
+ password = random_lower_string()
+ user_in = UserCreate(email=email, password=password, is_superuser=True)
+ user = crud.create_user(session=db, user_create=user_in)
+ assert user.is_superuser is True
+
+
+def test_check_if_user_is_superuser_normal_user(db: Session) -> None:
+ username = random_email()
+ password = random_lower_string()
+ user_in = UserCreate(email=username, password=password)
+ user = crud.create_user(session=db, user_create=user_in)
+ assert user.is_superuser is False
+
+
+def test_get_user(db: Session) -> None:
+ password = random_lower_string()
+ username = random_email()
+ user_in = UserCreate(email=username, password=password, is_superuser=True)
+ user = crud.create_user(session=db, user_create=user_in)
+ user_2 = db.get(User, user.id)
+ assert user_2
+ assert user.email == user_2.email
+ assert jsonable_encoder(user) == jsonable_encoder(user_2)
+
+
+def test_update_user(db: Session) -> None:
+ password = random_lower_string()
+ email = random_email()
+ user_in = UserCreate(email=email, password=password, is_superuser=True)
+ user = crud.create_user(session=db, user_create=user_in)
+ new_password = random_lower_string()
+ user_in_update = UserUpdate(password=new_password, is_superuser=True)
+ if user.id is not None:
+ crud.update_user(session=db, db_user=user, user_in=user_in_update)
+ user_2 = db.get(User, user.id)
+ assert user_2
+ assert user.email == user_2.email
+ verified, _ = verify_password(new_password, user_2.hashed_password)
+ assert verified
+
+
+def test_authenticate_user_with_bcrypt_upgrades_to_argon2(db: Session) -> None:
+ """Test that a user with bcrypt password hash gets upgraded to argon2 on login."""
+ email = random_email()
+ password = random_lower_string()
+
+ # Create a bcrypt hash directly (simulating legacy password)
+ bcrypt_hasher = BcryptHasher()
+ bcrypt_hash = bcrypt_hasher.hash(password)
+ assert bcrypt_hash.startswith("$2") # bcrypt hashes start with $2
+
+ # Create user with bcrypt hash directly in the database
+ user = User(email=email, hashed_password=bcrypt_hash)
+ db.add(user)
+ db.commit()
+ db.refresh(user)
+
+ # Verify the hash is bcrypt before authentication
+ assert user.hashed_password.startswith("$2")
+
+ # Authenticate - this should upgrade the hash to argon2
+ authenticated_user = crud.authenticate(session=db, email=email, password=password)
+ assert authenticated_user
+ assert authenticated_user.email == email
+
+ db.refresh(authenticated_user)
+
+ # Verify the hash was upgraded to argon2
+ assert authenticated_user.hashed_password.startswith("$argon2")
+
+ verified, updated_hash = verify_password(
+ password, authenticated_user.hashed_password
+ )
+ assert verified
+ # Should not need another update since it's already argon2
+ assert updated_hash is None
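
The test above pins down the contract that makes the upgrade possible: verify_password returns a (verified, updated_hash) tuple, where updated_hash is None unless the stored hash uses an outdated scheme, and crud.authenticate persists the new hash after a successful login. As a rough sketch only (not the template's actual crud.authenticate; the app.core.security import path is an assumption), an authenticate function honouring that contract could look like this:

from sqlmodel import Session, select

from app.core.security import verify_password  # assumed location of the helper
from app.models import User


def authenticate_sketch(*, session: Session, email: str, password: str) -> User | None:
    # Look up the account by email; unknown emails never authenticate.
    user = session.exec(select(User).where(User.email == email)).first()
    if not user:
        return None
    verified, updated_hash = verify_password(password, user.hashed_password)
    if not verified:
        return None
    # A legacy (e.g. bcrypt) hash is assumed to yield a freshly computed argon2
    # hash here; persisting it means the next login already uses the new scheme.
    if updated_hash is not None:
        user.hashed_password = updated_hash
        session.add(user)
        session.commit()
        session.refresh(user)
    return user
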
diff --git a/{{cookiecutter.project_slug}}/backend/app/app/tests/api/api_v1/__init__.py b/backend/tests/scripts/__init__.py
similarity index 100%
rename from {{cookiecutter.project_slug}}/backend/app/app/tests/api/api_v1/__init__.py
rename to backend/tests/scripts/__init__.py
diff --git a/backend/tests/scripts/test_backend_pre_start.py b/backend/tests/scripts/test_backend_pre_start.py
new file mode 100644
index 0000000000..e8f35c644a
--- /dev/null
+++ b/backend/tests/scripts/test_backend_pre_start.py
@@ -0,0 +1,33 @@
+from unittest.mock import MagicMock, patch
+
+from sqlmodel import select
+
+from app.backend_pre_start import init, logger
+
+
+def test_init_successful_connection() -> None:
+ engine_mock = MagicMock()
+
+ session_mock = MagicMock()
+ session_mock.__enter__.return_value = session_mock
+
+ select1 = select(1)
+
+ with (
+ patch("app.backend_pre_start.Session", return_value=session_mock),
+ patch("app.backend_pre_start.select", return_value=select1),
+ patch.object(logger, "info"),
+ patch.object(logger, "error"),
+ patch.object(logger, "warn"),
+ ):
+ try:
+ init(engine_mock)
+ connection_successful = True
+ except Exception:
+ connection_successful = False
+
+ assert connection_successful, (
+ "The database connection should be successful and not raise an exception."
+ )
+
+ session_mock.exec.assert_called_once_with(select1)
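
For context, the init function exercised here only needs to open a Session against the given engine and run a trivial SELECT 1, so startup fails loudly when the database is not reachable yet. A minimal sketch of that shape (illustrative only; the real pre-start module may wrap this in retry/backoff logic, which would explain why the test also patches the logger):

import logging

from sqlalchemy.engine import Engine
from sqlmodel import Session, select

logger = logging.getLogger(__name__)


def init(db_engine: Engine) -> None:
    try:
        # A throwaway SELECT 1 is enough to prove the database accepts connections.
        with Session(db_engine) as session:
            session.exec(select(1))
    except Exception as e:
        logger.error(e)
        raise
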
diff --git a/backend/tests/scripts/test_test_pre_start.py b/backend/tests/scripts/test_test_pre_start.py
new file mode 100644
index 0000000000..180bdd5232
--- /dev/null
+++ b/backend/tests/scripts/test_test_pre_start.py
@@ -0,0 +1,33 @@
+from unittest.mock import MagicMock, patch
+
+from sqlmodel import select
+
+from app.tests_pre_start import init, logger
+
+
+def test_init_successful_connection() -> None:
+ engine_mock = MagicMock()
+
+ session_mock = MagicMock()
+ session_mock.__enter__.return_value = session_mock
+
+ select1 = select(1)
+
+ with (
+ patch("app.tests_pre_start.Session", return_value=session_mock),
+ patch("app.tests_pre_start.select", return_value=select1),
+ patch.object(logger, "info"),
+ patch.object(logger, "error"),
+ patch.object(logger, "warn"),
+ ):
+ try:
+ init(engine_mock)
+ connection_successful = True
+ except Exception:
+ connection_successful = False
+
+ assert connection_successful, (
+ "The database connection should be successful and not raise an exception."
+ )
+
+ session_mock.exec.assert_called_once_with(select1)
diff --git a/{{cookiecutter.project_slug}}/backend/app/app/tests/crud/__init__.py b/backend/tests/utils/__init__.py
similarity index 100%
rename from {{cookiecutter.project_slug}}/backend/app/app/tests/crud/__init__.py
rename to backend/tests/utils/__init__.py
diff --git a/backend/tests/utils/item.py b/backend/tests/utils/item.py
new file mode 100644
index 0000000000..ee51b351a6
--- /dev/null
+++ b/backend/tests/utils/item.py
@@ -0,0 +1,16 @@
+from sqlmodel import Session
+
+from app import crud
+from app.models import Item, ItemCreate
+from tests.utils.user import create_random_user
+from tests.utils.utils import random_lower_string
+
+
+def create_random_item(db: Session) -> Item:
+ user = create_random_user(db)
+ owner_id = user.id
+ assert owner_id is not None
+ title = random_lower_string()
+ description = random_lower_string()
+ item_in = ItemCreate(title=title, description=description)
+ return crud.create_item(session=db, item_in=item_in, owner_id=owner_id)
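
As a usage sketch (a hypothetical test, not part of this diff), the helper gives any test a ready-made item with a valid owner:

from sqlmodel import Session

from tests.utils.item import create_random_item


def test_create_random_item_sets_owner(db: Session) -> None:
    item = create_random_item(db)
    # Title and description come from random_lower_string(), the owner from create_random_user().
    assert item.title
    assert item.description
    assert item.owner_id is not None
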
diff --git a/{{cookiecutter.project_slug}}/backend/app/app/tests/utils/user.py b/backend/tests/utils/user.py
similarity index 60%
rename from {{cookiecutter.project_slug}}/backend/app/app/tests/utils/user.py
rename to backend/tests/utils/user.py
index 097056c197..5867431ed8 100644
--- a/{{cookiecutter.project_slug}}/backend/app/app/tests/utils/user.py
+++ b/backend/tests/utils/user.py
@@ -1,18 +1,15 @@
-from typing import Dict
-
from fastapi.testclient import TestClient
-from sqlalchemy.orm import Session
+from sqlmodel import Session
from app import crud
from app.core.config import settings
-from app.models.user import User
-from app.schemas.user import UserCreate, UserUpdate
-from app.tests.utils.utils import random_email, random_lower_string
+from app.models import User, UserCreate, UserUpdate
+from tests.utils.utils import random_email, random_lower_string
def user_authentication_headers(
*, client: TestClient, email: str, password: str
-) -> Dict[str, str]:
+) -> dict[str, str]:
data = {"username": email, "password": password}
r = client.post(f"{settings.API_V1_STR}/login/access-token", data=data)
@@ -25,26 +22,28 @@ def user_authentication_headers(
def create_random_user(db: Session) -> User:
email = random_email()
password = random_lower_string()
- user_in = UserCreate(username=email, email=email, password=password)
- user = crud.user.create(db=db, obj_in=user_in)
+ user_in = UserCreate(email=email, password=password)
+ user = crud.create_user(session=db, user_create=user_in)
return user
def authentication_token_from_email(
*, client: TestClient, email: str, db: Session
-) -> Dict[str, str]:
+) -> dict[str, str]:
"""
Return a valid token for the user with given email.
If the user doesn't exist it is created first.
"""
password = random_lower_string()
- user = crud.user.get_by_email(db, email=email)
+ user = crud.get_user_by_email(session=db, email=email)
if not user:
- user_in_create = UserCreate(username=email, email=email, password=password)
- user = crud.user.create(db, obj_in=user_in_create)
+ user_in_create = UserCreate(email=email, password=password)
+ user = crud.create_user(session=db, user_create=user_in_create)
else:
user_in_update = UserUpdate(password=password)
- user = crud.user.update(db, db_obj=user, obj_in=user_in_update)
+ if not user.id:
+ raise Exception("User id not set")
+ user = crud.update_user(session=db, db_user=user, user_in=user_in_update)
return user_authentication_headers(client=client, email=email, password=password)
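
authentication_token_from_email is typically wired into a pytest fixture so API tests can act as a regular (non-superuser) account; a hypothetical fixture along these lines (the fixture name and test email are illustrative, and the client/db fixtures are assumed to come from the project's conftest):

import pytest
from fastapi.testclient import TestClient
from sqlmodel import Session

from tests.utils.user import authentication_token_from_email


@pytest.fixture()
def normal_user_token_headers(client: TestClient, db: Session) -> dict[str, str]:
    # Creates or refreshes a known test user and returns Bearer headers
    # ready to pass as headers= on TestClient requests.
    return authentication_token_from_email(
        client=client, email="test@example.com", db=db
    )
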
diff --git a/{{cookiecutter.project_slug}}/backend/app/app/tests/utils/utils.py b/backend/tests/utils/utils.py
similarity index 88%
rename from {{cookiecutter.project_slug}}/backend/app/app/tests/utils/utils.py
rename to backend/tests/utils/utils.py
index 021fc22017..184bac44d9 100644
--- a/{{cookiecutter.project_slug}}/backend/app/app/tests/utils/utils.py
+++ b/backend/tests/utils/utils.py
@@ -1,6 +1,5 @@
import random
import string
-from typing import Dict
from fastapi.testclient import TestClient
@@ -15,7 +14,7 @@ def random_email() -> str:
return f"{random_lower_string()}@{random_lower_string()}.com"
-def get_superuser_token_headers(client: TestClient) -> Dict[str, str]:
+def get_superuser_token_headers(client: TestClient) -> dict[str, str]:
login_data = {
"username": settings.FIRST_SUPERUSER,
"password": settings.FIRST_SUPERUSER_PASSWORD,
diff --git a/bun.lock b/bun.lock
new file mode 100644
index 0000000000..0e95cb0e33
--- /dev/null
+++ b/bun.lock
@@ -0,0 +1,927 @@
+{
+ "lockfileVersion": 1,
+ "configVersion": 0,
+ "workspaces": {
+ "": {
+ "name": "fastapi-cloud",
+ },
+ "frontend": {
+ "name": "frontend",
+ "version": "0.0.0",
+ "dependencies": {
+ "@hookform/resolvers": "^5.2.2",
+ "@radix-ui/react-avatar": "^1.1.11",
+ "@radix-ui/react-checkbox": "^1.3.3",
+ "@radix-ui/react-dialog": "^1.1.15",
+ "@radix-ui/react-dropdown-menu": "^2.1.16",
+ "@radix-ui/react-label": "^2.1.8",
+ "@radix-ui/react-radio-group": "^1.3.8",
+ "@radix-ui/react-scroll-area": "^1.2.10",
+ "@radix-ui/react-select": "^2.2.6",
+ "@radix-ui/react-separator": "^1.1.8",
+ "@radix-ui/react-slot": "^1.2.4",
+ "@radix-ui/react-tabs": "^1.1.13",
+ "@radix-ui/react-tooltip": "^1.2.8",
+ "@tailwindcss/vite": "^4.1.18",
+ "@tanstack/react-query": "^5.90.12",
+ "@tanstack/react-query-devtools": "^5.91.1",
+ "@tanstack/react-router": "^1.157.3",
+ "@tanstack/react-router-devtools": "^1.156.0",
+ "@tanstack/react-table": "^8.21.3",
+ "axios": "1.13.4",
+ "class-variance-authority": "^0.7.1",
+ "clsx": "^2.1.1",
+ "form-data": "4.0.5",
+ "lucide-react": "^0.562.0",
+ "next-themes": "^0.4.6",
+ "react": "^19.1.1",
+ "react-dom": "^19.2.3",
+ "react-error-boundary": "^6.0.0",
+ "react-hook-form": "^7.68.0",
+ "react-icons": "^5.5.0",
+ "sonner": "^2.0.7",
+ "tailwind-merge": "^3.4.0",
+ "tailwindcss": "^4.1.17",
+ "zod": "^4.3.6",
+ },
+ "devDependencies": {
+ "@biomejs/biome": "^2.3.12",
+ "@hey-api/openapi-ts": "0.73.0",
+ "@playwright/test": "1.58.0",
+ "@tanstack/router-devtools": "^1.157.17",
+ "@tanstack/router-plugin": "^1.140.0",
+ "@types/node": "^25.1.0",
+ "@types/react": "^19.2.7",
+ "@types/react-dom": "^19.2.3",
+ "@vitejs/plugin-react-swc": "^4.2.2",
+ "dotenv": "^17.2.3",
+ "tw-animate-css": "^1.4.0",
+ "typescript": "^5.9.3",
+ "vite": "^7.3.0",
+ },
+ },
+ },
+ "packages": {
+ "@babel/code-frame": ["@babel/code-frame@7.28.6", "", { "dependencies": { "@babel/helper-validator-identifier": "^7.28.5", "js-tokens": "^4.0.0", "picocolors": "^1.1.1" } }, "sha512-JYgintcMjRiCvS8mMECzaEn+m3PfoQiyqukOMCCVQtoJGYJw8j/8LBJEiqkHLkfwCcs74E3pbAUFNg7d9VNJ+Q=="],
+
+ "@babel/compat-data": ["@babel/compat-data@7.28.6", "", {}, "sha512-2lfu57JtzctfIrcGMz992hyLlByuzgIk58+hhGCxjKZ3rWI82NnVLjXcaTqkI2NvlcvOskZaiZ5kjUALo3Lpxg=="],
+
+ "@babel/core": ["@babel/core@7.28.6", "", { "dependencies": { "@babel/code-frame": "^7.28.6", "@babel/generator": "^7.28.6", "@babel/helper-compilation-targets": "^7.28.6", "@babel/helper-module-transforms": "^7.28.6", "@babel/helpers": "^7.28.6", "@babel/parser": "^7.28.6", "@babel/template": "^7.28.6", "@babel/traverse": "^7.28.6", "@babel/types": "^7.28.6", "@jridgewell/remapping": "^2.3.5", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.2.3", "semver": "^6.3.1" } }, "sha512-H3mcG6ZDLTlYfaSNi0iOKkigqMFvkTKlGUYlD8GW7nNOYRrevuA46iTypPyv+06V3fEmvvazfntkBU34L0azAw=="],
+
+ "@babel/generator": ["@babel/generator@7.28.6", "", { "dependencies": { "@babel/parser": "^7.28.6", "@babel/types": "^7.28.6", "@jridgewell/gen-mapping": "^0.3.12", "@jridgewell/trace-mapping": "^0.3.28", "jsesc": "^3.0.2" } }, "sha512-lOoVRwADj8hjf7al89tvQ2a1lf53Z+7tiXMgpZJL3maQPDxh0DgLMN62B2MKUOFcoodBHLMbDM6WAbKgNy5Suw=="],
+
+ "@babel/helper-compilation-targets": ["@babel/helper-compilation-targets@7.28.6", "", { "dependencies": { "@babel/compat-data": "^7.28.6", "@babel/helper-validator-option": "^7.27.1", "browserslist": "^4.24.0", "lru-cache": "^5.1.1", "semver": "^6.3.1" } }, "sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA=="],
+
+ "@babel/helper-globals": ["@babel/helper-globals@7.28.0", "", {}, "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw=="],
+
+ "@babel/helper-module-imports": ["@babel/helper-module-imports@7.28.6", "", { "dependencies": { "@babel/traverse": "^7.28.6", "@babel/types": "^7.28.6" } }, "sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw=="],
+
+ "@babel/helper-module-transforms": ["@babel/helper-module-transforms@7.28.6", "", { "dependencies": { "@babel/helper-module-imports": "^7.28.6", "@babel/helper-validator-identifier": "^7.28.5", "@babel/traverse": "^7.28.6" }, "peerDependencies": { "@babel/core": "^7.0.0" } }, "sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA=="],
+
+ "@babel/helper-plugin-utils": ["@babel/helper-plugin-utils@7.28.6", "", {}, "sha512-S9gzZ/bz83GRysI7gAD4wPT/AI3uCnY+9xn+Mx/KPs2JwHJIz1W8PZkg2cqyt3RNOBM8ejcXhV6y8Og7ly/Dug=="],
+
+ "@babel/helper-string-parser": ["@babel/helper-string-parser@7.27.1", "", {}, "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA=="],
+
+ "@babel/helper-validator-identifier": ["@babel/helper-validator-identifier@7.28.5", "", {}, "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q=="],
+
+ "@babel/helper-validator-option": ["@babel/helper-validator-option@7.27.1", "", {}, "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg=="],
+
+ "@babel/helpers": ["@babel/helpers@7.28.6", "", { "dependencies": { "@babel/template": "^7.28.6", "@babel/types": "^7.28.6" } }, "sha512-xOBvwq86HHdB7WUDTfKfT/Vuxh7gElQ+Sfti2Cy6yIWNW05P8iUslOVcZ4/sKbE+/jQaukQAdz/gf3724kYdqw=="],
+
+ "@babel/parser": ["@babel/parser@7.28.6", "", { "dependencies": { "@babel/types": "^7.28.6" }, "bin": { "parser": "bin/babel-parser.js" } }, "sha512-TeR9zWR18BvbfPmGbLampPMW+uW1NZnJlRuuHso8i87QZNq2JRF9i6RgxRqtEq+wQGsS19NNTWr2duhnE49mfQ=="],
+
+ "@babel/plugin-syntax-jsx": ["@babel/plugin-syntax-jsx@7.28.6", "", { "dependencies": { "@babel/helper-plugin-utils": "^7.28.6" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "sha512-wgEmr06G6sIpqr8YDwA2dSRTE3bJ+V0IfpzfSY3Lfgd7YWOaAdlykvJi13ZKBt8cZHfgH1IXN+CL656W3uUa4w=="],
+
+ "@babel/plugin-syntax-typescript": ["@babel/plugin-syntax-typescript@7.28.6", "", { "dependencies": { "@babel/helper-plugin-utils": "^7.28.6" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "sha512-+nDNmQye7nlnuuHDboPbGm00Vqg3oO8niRRL27/4LYHUsHYh0zJ1xWOz0uRwNFmM1Avzk8wZbc6rdiYhomzv/A=="],
+
+ "@babel/template": ["@babel/template@7.28.6", "", { "dependencies": { "@babel/code-frame": "^7.28.6", "@babel/parser": "^7.28.6", "@babel/types": "^7.28.6" } }, "sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ=="],
+
+ "@babel/traverse": ["@babel/traverse@7.28.6", "", { "dependencies": { "@babel/code-frame": "^7.28.6", "@babel/generator": "^7.28.6", "@babel/helper-globals": "^7.28.0", "@babel/parser": "^7.28.6", "@babel/template": "^7.28.6", "@babel/types": "^7.28.6", "debug": "^4.3.1" } }, "sha512-fgWX62k02qtjqdSNTAGxmKYY/7FSL9WAS1o2Hu5+I5m9T0yxZzr4cnrfXQ/MX0rIifthCSs6FKTlzYbJcPtMNg=="],
+
+ "@babel/types": ["@babel/types@7.28.6", "", { "dependencies": { "@babel/helper-string-parser": "^7.27.1", "@babel/helper-validator-identifier": "^7.28.5" } }, "sha512-0ZrskXVEHSWIqZM/sQZ4EV3jZJXRkio/WCxaqKZP1g//CEWEPSfeZFcms4XeKBCHU0ZKnIkdJeU/kF+eRp5lBg=="],
+
+ "@biomejs/biome": ["@biomejs/biome@2.3.12", "", { "optionalDependencies": { "@biomejs/cli-darwin-arm64": "2.3.12", "@biomejs/cli-darwin-x64": "2.3.12", "@biomejs/cli-linux-arm64": "2.3.12", "@biomejs/cli-linux-arm64-musl": "2.3.12", "@biomejs/cli-linux-x64": "2.3.12", "@biomejs/cli-linux-x64-musl": "2.3.12", "@biomejs/cli-win32-arm64": "2.3.12", "@biomejs/cli-win32-x64": "2.3.12" }, "bin": { "biome": "bin/biome" } }, "sha512-AR7h4aSlAvXj7TAajW/V12BOw2EiS0AqZWV5dGozf4nlLoUF/ifvD0+YgKSskT0ylA6dY1A8AwgP8kZ6yaCQnA=="],
+
+ "@biomejs/cli-darwin-arm64": ["@biomejs/cli-darwin-arm64@2.3.12", "", { "os": "darwin", "cpu": "arm64" }, "sha512-cO6fn+KiMBemva6EARDLQBxeyvLzgidaFRJi8G7OeRqz54kWK0E+uSjgFaiHlc3DZYoa0+1UFE8mDxozpc9ieg=="],
+
+ "@biomejs/cli-darwin-x64": ["@biomejs/cli-darwin-x64@2.3.12", "", { "os": "darwin", "cpu": "x64" }, "sha512-/fiF/qmudKwSdvmSrSe/gOTkW77mHHkH8Iy7YC2rmpLuk27kbaUOPa7kPiH5l+3lJzTUfU/t6x1OuIq/7SGtxg=="],
+
+ "@biomejs/cli-linux-arm64": ["@biomejs/cli-linux-arm64@2.3.12", "", { "os": "linux", "cpu": "arm64" }, "sha512-nbOsuQROa3DLla5vvsTZg+T5WVPGi9/vYxETm9BOuLHBJN3oWQIg3MIkE2OfL18df1ZtNkqXkH6Yg9mdTPem7A=="],
+
+ "@biomejs/cli-linux-arm64-musl": ["@biomejs/cli-linux-arm64-musl@2.3.12", "", { "os": "linux", "cpu": "arm64" }, "sha512-aqkeSf7IH+wkzFpKeDVPSXy9uDjxtLpYA6yzkYsY+tVjwFFirSuajHDI3ul8en90XNs1NA0n8kgBrjwRi5JeyA=="],
+
+ "@biomejs/cli-linux-x64": ["@biomejs/cli-linux-x64@2.3.12", "", { "os": "linux", "cpu": "x64" }, "sha512-CQtqrJ+qEEI8tgRSTjjzk6wJAwfH3wQlkIGsM5dlecfRZaoT+XCms/mf7G4kWNexrke6mnkRzNy6w8ebV177ow=="],
+
+ "@biomejs/cli-linux-x64-musl": ["@biomejs/cli-linux-x64-musl@2.3.12", "", { "os": "linux", "cpu": "x64" }, "sha512-kVGWtupRRsOjvw47YFkk5mLiAdpCPMWBo1jOwAzh+juDpUb2sWarIp+iq+CPL1Wt0LLZnYtP7hH5kD6fskcxmg=="],
+
+ "@biomejs/cli-win32-arm64": ["@biomejs/cli-win32-arm64@2.3.12", "", { "os": "win32", "cpu": "arm64" }, "sha512-Re4I7UnOoyE4kHMqpgtG6UvSBGBbbtvsOvBROgCCoH7EgANN6plSQhvo2W7OCITvTp7gD6oZOyZy72lUdXjqZg=="],
+
+ "@biomejs/cli-win32-x64": ["@biomejs/cli-win32-x64@2.3.12", "", { "os": "win32", "cpu": "x64" }, "sha512-qqGVWqNNek0KikwPZlOIoxtXgsNGsX+rgdEzgw82Re8nF02W+E2WokaQhpF5TdBh/D/RQ3TLppH+otp6ztN0lw=="],
+
+ "@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.27.2", "", { "os": "aix", "cpu": "ppc64" }, "sha512-GZMB+a0mOMZs4MpDbj8RJp4cw+w1WV5NYD6xzgvzUJ5Ek2jerwfO2eADyI6ExDSUED+1X8aMbegahsJi+8mgpw=="],
+
+ "@esbuild/android-arm": ["@esbuild/android-arm@0.27.2", "", { "os": "android", "cpu": "arm" }, "sha512-DVNI8jlPa7Ujbr1yjU2PfUSRtAUZPG9I1RwW4F4xFB1Imiu2on0ADiI/c3td+KmDtVKNbi+nffGDQMfcIMkwIA=="],
+
+ "@esbuild/android-arm64": ["@esbuild/android-arm64@0.27.2", "", { "os": "android", "cpu": "arm64" }, "sha512-pvz8ZZ7ot/RBphf8fv60ljmaoydPU12VuXHImtAs0XhLLw+EXBi2BLe3OYSBslR4rryHvweW5gmkKFwTiFy6KA=="],
+
+ "@esbuild/android-x64": ["@esbuild/android-x64@0.27.2", "", { "os": "android", "cpu": "x64" }, "sha512-z8Ank4Byh4TJJOh4wpz8g2vDy75zFL0TlZlkUkEwYXuPSgX8yzep596n6mT7905kA9uHZsf/o2OJZubl2l3M7A=="],
+
+ "@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.27.2", "", { "os": "darwin", "cpu": "arm64" }, "sha512-davCD2Zc80nzDVRwXTcQP/28fiJbcOwvdolL0sOiOsbwBa72kegmVU0Wrh1MYrbuCL98Omp5dVhQFWRKR2ZAlg=="],
+
+ "@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.27.2", "", { "os": "darwin", "cpu": "x64" }, "sha512-ZxtijOmlQCBWGwbVmwOF/UCzuGIbUkqB1faQRf5akQmxRJ1ujusWsb3CVfk/9iZKr2L5SMU5wPBi1UWbvL+VQA=="],
+
+ "@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.27.2", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-lS/9CN+rgqQ9czogxlMcBMGd+l8Q3Nj1MFQwBZJyoEKI50XGxwuzznYdwcav6lpOGv5BqaZXqvBSiB/kJ5op+g=="],
+
+ "@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.27.2", "", { "os": "freebsd", "cpu": "x64" }, "sha512-tAfqtNYb4YgPnJlEFu4c212HYjQWSO/w/h/lQaBK7RbwGIkBOuNKQI9tqWzx7Wtp7bTPaGC6MJvWI608P3wXYA=="],
+
+ "@esbuild/linux-arm": ["@esbuild/linux-arm@0.27.2", "", { "os": "linux", "cpu": "arm" }, "sha512-vWfq4GaIMP9AIe4yj1ZUW18RDhx6EPQKjwe7n8BbIecFtCQG4CfHGaHuh7fdfq+y3LIA2vGS/o9ZBGVxIDi9hw=="],
+
+ "@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.27.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-hYxN8pr66NsCCiRFkHUAsxylNOcAQaxSSkHMMjcpx0si13t1LHFphxJZUiGwojB1a/Hd5OiPIqDdXONia6bhTw=="],
+
+ "@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.27.2", "", { "os": "linux", "cpu": "ia32" }, "sha512-MJt5BRRSScPDwG2hLelYhAAKh9imjHK5+NE/tvnRLbIqUWa+0E9N4WNMjmp/kXXPHZGqPLxggwVhz7QP8CTR8w=="],
+
+ "@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.27.2", "", { "os": "linux", "cpu": "none" }, "sha512-lugyF1atnAT463aO6KPshVCJK5NgRnU4yb3FUumyVz+cGvZbontBgzeGFO1nF+dPueHD367a2ZXe1NtUkAjOtg=="],
+
+ "@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.27.2", "", { "os": "linux", "cpu": "none" }, "sha512-nlP2I6ArEBewvJ2gjrrkESEZkB5mIoaTswuqNFRv/WYd+ATtUpe9Y09RnJvgvdag7he0OWgEZWhviS1OTOKixw=="],
+
+ "@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.27.2", "", { "os": "linux", "cpu": "ppc64" }, "sha512-C92gnpey7tUQONqg1n6dKVbx3vphKtTHJaNG2Ok9lGwbZil6DrfyecMsp9CrmXGQJmZ7iiVXvvZH6Ml5hL6XdQ=="],
+
+ "@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.27.2", "", { "os": "linux", "cpu": "none" }, "sha512-B5BOmojNtUyN8AXlK0QJyvjEZkWwy/FKvakkTDCziX95AowLZKR6aCDhG7LeF7uMCXEJqwa8Bejz5LTPYm8AvA=="],
+
+ "@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.27.2", "", { "os": "linux", "cpu": "s390x" }, "sha512-p4bm9+wsPwup5Z8f4EpfN63qNagQ47Ua2znaqGH6bqLlmJ4bx97Y9JdqxgGZ6Y8xVTixUnEkoKSHcpRlDnNr5w=="],
+
+ "@esbuild/linux-x64": ["@esbuild/linux-x64@0.27.2", "", { "os": "linux", "cpu": "x64" }, "sha512-uwp2Tip5aPmH+NRUwTcfLb+W32WXjpFejTIOWZFw/v7/KnpCDKG66u4DLcurQpiYTiYwQ9B7KOeMJvLCu/OvbA=="],
+
+ "@esbuild/netbsd-arm64": ["@esbuild/netbsd-arm64@0.27.2", "", { "os": "none", "cpu": "arm64" }, "sha512-Kj6DiBlwXrPsCRDeRvGAUb/LNrBASrfqAIok+xB0LxK8CHqxZ037viF13ugfsIpePH93mX7xfJp97cyDuTZ3cw=="],
+
+ "@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.27.2", "", { "os": "none", "cpu": "x64" }, "sha512-HwGDZ0VLVBY3Y+Nw0JexZy9o/nUAWq9MlV7cahpaXKW6TOzfVno3y3/M8Ga8u8Yr7GldLOov27xiCnqRZf0tCA=="],
+
+ "@esbuild/openbsd-arm64": ["@esbuild/openbsd-arm64@0.27.2", "", { "os": "openbsd", "cpu": "arm64" }, "sha512-DNIHH2BPQ5551A7oSHD0CKbwIA/Ox7+78/AWkbS5QoRzaqlev2uFayfSxq68EkonB+IKjiuxBFoV8ESJy8bOHA=="],
+
+ "@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.27.2", "", { "os": "openbsd", "cpu": "x64" }, "sha512-/it7w9Nb7+0KFIzjalNJVR5bOzA9Vay+yIPLVHfIQYG/j+j9VTH84aNB8ExGKPU4AzfaEvN9/V4HV+F+vo8OEg=="],
+
+ "@esbuild/openharmony-arm64": ["@esbuild/openharmony-arm64@0.27.2", "", { "os": "none", "cpu": "arm64" }, "sha512-LRBbCmiU51IXfeXk59csuX/aSaToeG7w48nMwA6049Y4J4+VbWALAuXcs+qcD04rHDuSCSRKdmY63sruDS5qag=="],
+
+ "@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.27.2", "", { "os": "sunos", "cpu": "x64" }, "sha512-kMtx1yqJHTmqaqHPAzKCAkDaKsffmXkPHThSfRwZGyuqyIeBvf08KSsYXl+abf5HDAPMJIPnbBfXvP2ZC2TfHg=="],
+
+ "@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.27.2", "", { "os": "win32", "cpu": "arm64" }, "sha512-Yaf78O/B3Kkh+nKABUF++bvJv5Ijoy9AN1ww904rOXZFLWVc5OLOfL56W+C8F9xn5JQZa3UX6m+IktJnIb1Jjg=="],
+
+ "@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.27.2", "", { "os": "win32", "cpu": "ia32" }, "sha512-Iuws0kxo4yusk7sw70Xa2E2imZU5HoixzxfGCdxwBdhiDgt9vX9VUCBhqcwY7/uh//78A1hMkkROMJq9l27oLQ=="],
+
+ "@esbuild/win32-x64": ["@esbuild/win32-x64@0.27.2", "", { "os": "win32", "cpu": "x64" }, "sha512-sRdU18mcKf7F+YgheI/zGf5alZatMUTKj/jNS6l744f9u3WFu4v7twcUI9vu4mknF4Y9aDlblIie0IM+5xxaqQ=="],
+
+ "@floating-ui/core": ["@floating-ui/core@1.7.3", "", { "dependencies": { "@floating-ui/utils": "^0.2.10" } }, "sha512-sGnvb5dmrJaKEZ+LDIpguvdX3bDlEllmv4/ClQ9awcmCZrlx5jQyyMWFM5kBI+EyNOCDDiKk8il0zeuX3Zlg/w=="],
+
+ "@floating-ui/dom": ["@floating-ui/dom@1.7.4", "", { "dependencies": { "@floating-ui/core": "^1.7.3", "@floating-ui/utils": "^0.2.10" } }, "sha512-OOchDgh4F2CchOX94cRVqhvy7b3AFb+/rQXyswmzmGakRfkMgoWVjfnLWkRirfLEfuD4ysVW16eXzwt3jHIzKA=="],
+
+ "@floating-ui/react-dom": ["@floating-ui/react-dom@2.1.6", "", { "dependencies": { "@floating-ui/dom": "^1.7.4" }, "peerDependencies": { "react": ">=16.8.0", "react-dom": ">=16.8.0" } }, "sha512-4JX6rEatQEvlmgU80wZyq9RT96HZJa88q8hp0pBd+LrczeDI4o6uA2M+uvxngVHo4Ihr8uibXxH6+70zhAFrVw=="],
+
+ "@floating-ui/utils": ["@floating-ui/utils@0.2.10", "", {}, "sha512-aGTxbpbg8/b5JfU1HXSrbH3wXZuLPJcNEcZQFMxLs3oSzgtVu6nFPkbbGGUvBcUjKV2YyB9Wxxabo+HEH9tcRQ=="],
+
+ "@hey-api/json-schema-ref-parser": ["@hey-api/json-schema-ref-parser@1.0.6", "", { "dependencies": { "@jsdevtools/ono": "^7.1.3", "@types/json-schema": "^7.0.15", "js-yaml": "^4.1.0", "lodash": "^4.17.21" } }, "sha512-yktiFZoWPtEW8QKS65eqKwA5MTKp88CyiL8q72WynrBs/73SAaxlSWlA2zW/DZlywZ5hX1OYzrCC0wFdvO9c2w=="],
+
+ "@hey-api/openapi-ts": ["@hey-api/openapi-ts@0.73.0", "", { "dependencies": { "@hey-api/json-schema-ref-parser": "1.0.6", "ansi-colors": "4.1.3", "c12": "2.0.1", "color-support": "1.1.3", "commander": "13.0.0", "handlebars": "4.7.8", "open": "10.1.2" }, "peerDependencies": { "typescript": "^5.5.3" }, "bin": { "openapi-ts": "bin/index.cjs" } }, "sha512-sUscR3OIGW0k9U//28Cu6BTp3XaogWMDORj9H+5Du9E5AvTT7LZbCEDvkLhebFOPkp2cZAQfd66HiZsiwssBcQ=="],
+
+ "@hookform/resolvers": ["@hookform/resolvers@5.2.2", "", { "dependencies": { "@standard-schema/utils": "^0.3.0" }, "peerDependencies": { "react-hook-form": "^7.55.0" } }, "sha512-A/IxlMLShx3KjV/HeTcTfaMxdwy690+L/ZADoeaTltLx+CVuzkeVIPuybK3jrRfw7YZnmdKsVVHAlEPIAEUNlA=="],
+
+ "@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.13", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.0", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA=="],
+
+ "@jridgewell/remapping": ["@jridgewell/remapping@2.3.5", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ=="],
+
+ "@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.2", "", {}, "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw=="],
+
+ "@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.5.5", "", {}, "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og=="],
+
+ "@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.31", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw=="],
+
+ "@jsdevtools/ono": ["@jsdevtools/ono@7.1.3", "", {}, "sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg=="],
+
+ "@playwright/test": ["@playwright/test@1.58.0", "", { "dependencies": { "playwright": "1.58.0" }, "bin": { "playwright": "cli.js" } }, "sha512-fWza+Lpbj6SkQKCrU6si4iu+fD2dD3gxNHFhUPxsfXBPhnv3rRSQVd0NtBUT9Z/RhF/boCBcuUaMUSTRTopjZg=="],
+
+ "@radix-ui/number": ["@radix-ui/number@1.1.1", "", {}, "sha512-MkKCwxlXTgz6CFoJx3pCwn07GKp36+aZyu/u2Ln2VrA5DcdyCZkASEDBTd8x5whTQQL5CiYf4prXKLcgQdv29g=="],
+
+ "@radix-ui/primitive": ["@radix-ui/primitive@1.1.3", "", {}, "sha512-JTF99U/6XIjCBo0wqkU5sK10glYe27MRRsfwoiq5zzOEZLHU3A3KCMa5X/azekYRCJ0HlwI0crAXS/5dEHTzDg=="],
+
+ "@radix-ui/react-arrow": ["@radix-ui/react-arrow@1.1.7", "", { "dependencies": { "@radix-ui/react-primitive": "2.1.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-F+M1tLhO+mlQaOWspE8Wstg+z6PwxwRd8oQ8IXceWz92kfAmalTRf0EjrouQeo7QssEPfCn05B4Ihs1K9WQ/7w=="],
+
+ "@radix-ui/react-avatar": ["@radix-ui/react-avatar@1.1.11", "", { "dependencies": { "@radix-ui/react-context": "1.1.3", "@radix-ui/react-primitive": "2.1.4", "@radix-ui/react-use-callback-ref": "1.1.1", "@radix-ui/react-use-is-hydrated": "0.1.0", "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-0Qk603AHGV28BOBO34p7IgD5m+V5Sg/YovfayABkoDDBM5d3NCx0Mp4gGrjzLGes1jV5eNOE1r3itqOR33VC6Q=="],
+
+ "@radix-ui/react-checkbox": ["@radix-ui/react-checkbox@1.3.3", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-presence": "1.1.5", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-controllable-state": "1.2.2", "@radix-ui/react-use-previous": "1.1.1", "@radix-ui/react-use-size": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-wBbpv+NQftHDdG86Qc0pIyXk5IR3tM8Vd0nWLKDcX8nNn4nXFOFwsKuqw2okA/1D/mpaAkmuyndrPJTYDNZtFw=="],
+
+ "@radix-ui/react-collection": ["@radix-ui/react-collection@1.1.7", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-slot": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-Fh9rGN0MoI4ZFUNyfFVNU4y9LUz93u9/0K+yLgA2bwRojxM8JU1DyvvMBabnZPBgMWREAJvU2jjVzq+LrFUglw=="],
+
+ "@radix-ui/react-compose-refs": ["@radix-ui/react-compose-refs@1.1.2", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-z4eqJvfiNnFMHIIvXP3CY57y2WJs5g2v3X0zm9mEJkrkNv4rDxu+sg9Jh8EkXyeqBkB7SOcboo9dMVqhyrACIg=="],
+
+ "@radix-ui/react-context": ["@radix-ui/react-context@1.1.3", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-ieIFACdMpYfMEjF0rEf5KLvfVyIkOz6PDGyNnP+u+4xQ6jny3VCgA4OgXOwNx2aUkxn8zx9fiVcM8CfFYv9Lxw=="],
+
+ "@radix-ui/react-dialog": ["@radix-ui/react-dialog@1.1.15", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-dismissable-layer": "1.1.11", "@radix-ui/react-focus-guards": "1.1.3", "@radix-ui/react-focus-scope": "1.1.7", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-portal": "1.1.9", "@radix-ui/react-presence": "1.1.5", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-slot": "1.2.3", "@radix-ui/react-use-controllable-state": "1.2.2", "aria-hidden": "^1.2.4", "react-remove-scroll": "^2.6.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-TCglVRtzlffRNxRMEyR36DGBLJpeusFcgMVD9PZEzAKnUs1lKCgX5u9BmC2Yg+LL9MgZDugFFs1Vl+Jp4t/PGw=="],
+
+ "@radix-ui/react-direction": ["@radix-ui/react-direction@1.1.1", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-1UEWRX6jnOA2y4H5WczZ44gOOjTEmlqv1uNW4GAJEO5+bauCBhv8snY65Iw5/VOS/ghKN9gr2KjnLKxrsvoMVw=="],
+
+ "@radix-ui/react-dismissable-layer": ["@radix-ui/react-dismissable-layer@1.1.11", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-callback-ref": "1.1.1", "@radix-ui/react-use-escape-keydown": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-Nqcp+t5cTB8BinFkZgXiMJniQH0PsUt2k51FUhbdfeKvc4ACcG2uQniY/8+h1Yv6Kza4Q7lD7PQV0z0oicE0Mg=="],
+
+ "@radix-ui/react-dropdown-menu": ["@radix-ui/react-dropdown-menu@2.1.16", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-menu": "2.1.16", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-controllable-state": "1.2.2" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-1PLGQEynI/3OX/ftV54COn+3Sud/Mn8vALg2rWnBLnRaGtJDduNW/22XjlGgPdpcIbiQxjKtb7BkcjP00nqfJw=="],
+
+ "@radix-ui/react-focus-guards": ["@radix-ui/react-focus-guards@1.1.3", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-0rFg/Rj2Q62NCm62jZw0QX7a3sz6QCQU0LpZdNrJX8byRGaGVTqbrW9jAoIAHyMQqsNpeZ81YgSizOt5WXq0Pw=="],
+
+ "@radix-ui/react-focus-scope": ["@radix-ui/react-focus-scope@1.1.7", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-callback-ref": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-t2ODlkXBQyn7jkl6TNaw/MtVEVvIGelJDCG41Okq/KwUsJBwQ4XVZsHAVUkK4mBv3ewiAS3PGuUWuY2BoK4ZUw=="],
+
+ "@radix-ui/react-id": ["@radix-ui/react-id@1.1.1", "", { "dependencies": { "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-kGkGegYIdQsOb4XjsfM97rXsiHaBwco+hFI66oO4s9LU+PLAC5oJ7khdOVFxkhsmlbpUqDAvXw11CluXP+jkHg=="],
+
+ "@radix-ui/react-label": ["@radix-ui/react-label@2.1.8", "", { "dependencies": { "@radix-ui/react-primitive": "2.1.4" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-FmXs37I6hSBVDlO4y764TNz1rLgKwjJMQ0EGte6F3Cb3f4bIuHB/iLa/8I9VKkmOy+gNHq8rql3j686ACVV21A=="],
+
+ "@radix-ui/react-menu": ["@radix-ui/react-menu@2.1.16", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-collection": "1.1.7", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-direction": "1.1.1", "@radix-ui/react-dismissable-layer": "1.1.11", "@radix-ui/react-focus-guards": "1.1.3", "@radix-ui/react-focus-scope": "1.1.7", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-popper": "1.2.8", "@radix-ui/react-portal": "1.1.9", "@radix-ui/react-presence": "1.1.5", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-roving-focus": "1.1.11", "@radix-ui/react-slot": "1.2.3", "@radix-ui/react-use-callback-ref": "1.1.1", "aria-hidden": "^1.2.4", "react-remove-scroll": "^2.6.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-72F2T+PLlphrqLcAotYPp0uJMr5SjP5SL01wfEspJbru5Zs5vQaSHb4VB3ZMJPimgHHCHG7gMOeOB9H3Hdmtxg=="],
+
+ "@radix-ui/react-popper": ["@radix-ui/react-popper@1.2.8", "", { "dependencies": { "@floating-ui/react-dom": "^2.0.0", "@radix-ui/react-arrow": "1.1.7", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-callback-ref": "1.1.1", "@radix-ui/react-use-layout-effect": "1.1.1", "@radix-ui/react-use-rect": "1.1.1", "@radix-ui/react-use-size": "1.1.1", "@radix-ui/rect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-0NJQ4LFFUuWkE7Oxf0htBKS6zLkkjBH+hM1uk7Ng705ReR8m/uelduy1DBo0PyBXPKVnBA6YBlU94MBGXrSBCw=="],
+
+ "@radix-ui/react-portal": ["@radix-ui/react-portal@1.1.9", "", { "dependencies": { "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-bpIxvq03if6UNwXZ+HTK71JLh4APvnXntDc6XOX8UVq4XQOVl7lwok0AvIl+b8zgCw3fSaVTZMpAPPagXbKmHQ=="],
+
+ "@radix-ui/react-presence": ["@radix-ui/react-presence@1.1.5", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-/jfEwNDdQVBCNvjkGit4h6pMOzq8bHkopq458dPt2lMjx+eBQUohZNG9A7DtO/O5ukSbxuaNGXMjHicgwy6rQQ=="],
+
+ "@radix-ui/react-primitive": ["@radix-ui/react-primitive@2.1.4", "", { "dependencies": { "@radix-ui/react-slot": "1.2.4" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-9hQc4+GNVtJAIEPEqlYqW5RiYdrr8ea5XQ0ZOnD6fgru+83kqT15mq2OCcbe8KnjRZl5vF3ks69AKz3kh1jrhg=="],
+
+ "@radix-ui/react-radio-group": ["@radix-ui/react-radio-group@1.3.8", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-direction": "1.1.1", "@radix-ui/react-presence": "1.1.5", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-roving-focus": "1.1.11", "@radix-ui/react-use-controllable-state": "1.2.2", "@radix-ui/react-use-previous": "1.1.1", "@radix-ui/react-use-size": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-VBKYIYImA5zsxACdisNQ3BjCBfmbGH3kQlnFVqlWU4tXwjy7cGX8ta80BcrO+WJXIn5iBylEH3K6ZTlee//lgQ=="],
+
+ "@radix-ui/react-roving-focus": ["@radix-ui/react-roving-focus@1.1.11", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-collection": "1.1.7", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-direction": "1.1.1", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-callback-ref": "1.1.1", "@radix-ui/react-use-controllable-state": "1.2.2" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-7A6S9jSgm/S+7MdtNDSb+IU859vQqJ/QAtcYQcfFC6W8RS4IxIZDldLR0xqCFZ6DCyrQLjLPsxtTNch5jVA4lA=="],
+
+ "@radix-ui/react-scroll-area": ["@radix-ui/react-scroll-area@1.2.10", "", { "dependencies": { "@radix-ui/number": "1.1.1", "@radix-ui/primitive": "1.1.3", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-direction": "1.1.1", "@radix-ui/react-presence": "1.1.5", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-callback-ref": "1.1.1", "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-tAXIa1g3sM5CGpVT0uIbUx/U3Gs5N8T52IICuCtObaos1S8fzsrPXG5WObkQN3S6NVl6wKgPhAIiBGbWnvc97A=="],
+
+ "@radix-ui/react-select": ["@radix-ui/react-select@2.2.6", "", { "dependencies": { "@radix-ui/number": "1.1.1", "@radix-ui/primitive": "1.1.3", "@radix-ui/react-collection": "1.1.7", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-direction": "1.1.1", "@radix-ui/react-dismissable-layer": "1.1.11", "@radix-ui/react-focus-guards": "1.1.3", "@radix-ui/react-focus-scope": "1.1.7", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-popper": "1.2.8", "@radix-ui/react-portal": "1.1.9", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-slot": "1.2.3", "@radix-ui/react-use-callback-ref": "1.1.1", "@radix-ui/react-use-controllable-state": "1.2.2", "@radix-ui/react-use-layout-effect": "1.1.1", "@radix-ui/react-use-previous": "1.1.1", "@radix-ui/react-visually-hidden": "1.2.3", "aria-hidden": "^1.2.4", "react-remove-scroll": "^2.6.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-I30RydO+bnn2PQztvo25tswPH+wFBjehVGtmagkU78yMdwTwVf12wnAOF+AeP8S2N8xD+5UPbGhkUfPyvT+mwQ=="],
+
+ "@radix-ui/react-separator": ["@radix-ui/react-separator@1.1.8", "", { "dependencies": { "@radix-ui/react-primitive": "2.1.4" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-sDvqVY4itsKwwSMEe0jtKgfTh+72Sy3gPmQpjqcQneqQ4PFmr/1I0YA+2/puilhggCe2gJcx5EBAYFkWkdpa5g=="],
+
+ "@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.4", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-Jl+bCv8HxKnlTLVrcDE8zTMJ09R9/ukw4qBs/oZClOfoQk/cOTbDn+NceXfV7j09YPVQUryJPHurafcSg6EVKA=="],
+
+ "@radix-ui/react-tabs": ["@radix-ui/react-tabs@1.1.13", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-direction": "1.1.1", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-presence": "1.1.5", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-roving-focus": "1.1.11", "@radix-ui/react-use-controllable-state": "1.2.2" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-7xdcatg7/U+7+Udyoj2zodtI9H/IIopqo+YOIcZOq1nJwXWBZ9p8xiu5llXlekDbZkca79a/fozEYQXIA4sW6A=="],
+
+ "@radix-ui/react-tooltip": ["@radix-ui/react-tooltip@1.2.8", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-dismissable-layer": "1.1.11", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-popper": "1.2.8", "@radix-ui/react-portal": "1.1.9", "@radix-ui/react-presence": "1.1.5", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-slot": "1.2.3", "@radix-ui/react-use-controllable-state": "1.2.2", "@radix-ui/react-visually-hidden": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-tY7sVt1yL9ozIxvmbtN5qtmH2krXcBCfjEiCgKGLqunJHvgvZG2Pcl2oQ3kbcZARb1BGEHdkLzcYGO8ynVlieg=="],
+
+ "@radix-ui/react-use-callback-ref": ["@radix-ui/react-use-callback-ref@1.1.1", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-FkBMwD+qbGQeMu1cOHnuGB6x4yzPjho8ap5WtbEJ26umhgqVXbhekKUQO+hZEL1vU92a3wHwdp0HAcqAUF5iDg=="],
+
+ "@radix-ui/react-use-controllable-state": ["@radix-ui/react-use-controllable-state@1.2.2", "", { "dependencies": { "@radix-ui/react-use-effect-event": "0.0.2", "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-BjasUjixPFdS+NKkypcyyN5Pmg83Olst0+c6vGov0diwTEo6mgdqVR6hxcEgFuh4QrAs7Rc+9KuGJ9TVCj0Zzg=="],
+
+ "@radix-ui/react-use-effect-event": ["@radix-ui/react-use-effect-event@0.0.2", "", { "dependencies": { "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-Qp8WbZOBe+blgpuUT+lw2xheLP8q0oatc9UpmiemEICxGvFLYmHm9QowVZGHtJlGbS6A6yJ3iViad/2cVjnOiA=="],
+
+ "@radix-ui/react-use-escape-keydown": ["@radix-ui/react-use-escape-keydown@1.1.1", "", { "dependencies": { "@radix-ui/react-use-callback-ref": "1.1.1" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-Il0+boE7w/XebUHyBjroE+DbByORGR9KKmITzbR7MyQ4akpORYP/ZmbhAr0DG7RmmBqoOnZdy2QlvajJ2QA59g=="],
+
+ "@radix-ui/react-use-is-hydrated": ["@radix-ui/react-use-is-hydrated@0.1.0", "", { "dependencies": { "use-sync-external-store": "^1.5.0" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-U+UORVEq+cTnRIaostJv9AGdV3G6Y+zbVd+12e18jQ5A3c0xL03IhnHuiU4UV69wolOQp5GfR58NW/EgdQhwOA=="],
+
+ "@radix-ui/react-use-layout-effect": ["@radix-ui/react-use-layout-effect@1.1.1", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-RbJRS4UWQFkzHTTwVymMTUv8EqYhOp8dOOviLj2ugtTiXRaRQS7GLGxZTLL1jWhMeoSCf5zmcZkqTl9IiYfXcQ=="],
+
+ "@radix-ui/react-use-previous": ["@radix-ui/react-use-previous@1.1.1", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-2dHfToCj/pzca2Ck724OZ5L0EVrr3eHRNsG/b3xQJLA2hZpVCS99bLAX+hm1IHXDEnzU6by5z/5MIY794/a8NQ=="],
+
+ "@radix-ui/react-use-rect": ["@radix-ui/react-use-rect@1.1.1", "", { "dependencies": { "@radix-ui/rect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-QTYuDesS0VtuHNNvMh+CjlKJ4LJickCMUAqjlE3+j8w+RlRpwyX3apEQKGFzbZGdo7XNG1tXa+bQqIE7HIXT2w=="],
+
+ "@radix-ui/react-use-size": ["@radix-ui/react-use-size@1.1.1", "", { "dependencies": { "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-ewrXRDTAqAXlkl6t/fkXWNAhFX9I+CkKlw6zjEwk86RSPKwZr3xpBRso655aqYafwtnbpHLj6toFzmd6xdVptQ=="],
+
+ "@radix-ui/react-visually-hidden": ["@radix-ui/react-visually-hidden@1.2.3", "", { "dependencies": { "@radix-ui/react-primitive": "2.1.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-pzJq12tEaaIhqjbzpCuv/OypJY/BPavOofm+dbab+MHLajy277+1lLm6JFcGgF5eskJ6mquGirhXY2GD/8u8Ug=="],
+
+ "@radix-ui/rect": ["@radix-ui/rect@1.1.1", "", {}, "sha512-HPwpGIzkl28mWyZqG52jiqDJ12waP11Pa1lGoiyUkIEuMLBP0oeK/C89esbXrxsky5we7dfd8U58nm0SgAWpVw=="],
+
+ "@rolldown/pluginutils": ["@rolldown/pluginutils@1.0.0-beta.47", "", {}, "sha512-8QagwMH3kNCuzD8EWL8R2YPW5e4OrHNSAHRFDdmFqEwEaD/KcNKjVoumo+gP2vW5eKB2UPbM6vTYiGZX0ixLnw=="],
+
+ "@rollup/rollup-android-arm-eabi": ["@rollup/rollup-android-arm-eabi@4.55.2", "", { "os": "android", "cpu": "arm" }, "sha512-21J6xzayjy3O6NdnlO6aXi/urvSRjm6nCI6+nF6ra2YofKruGixN9kfT+dt55HVNwfDmpDHJcaS3JuP/boNnlA=="],
+
+ "@rollup/rollup-android-arm64": ["@rollup/rollup-android-arm64@4.55.2", "", { "os": "android", "cpu": "arm64" }, "sha512-eXBg7ibkNUZ+sTwbFiDKou0BAckeV6kIigK7y5Ko4mB/5A1KLhuzEKovsmfvsL8mQorkoincMFGnQuIT92SKqA=="],
+
+ "@rollup/rollup-darwin-arm64": ["@rollup/rollup-darwin-arm64@4.55.2", "", { "os": "darwin", "cpu": "arm64" }, "sha512-UCbaTklREjrc5U47ypLulAgg4njaqfOVLU18VrCrI+6E5MQjuG0lSWaqLlAJwsD7NpFV249XgB0Bi37Zh5Sz4g=="],
+
+ "@rollup/rollup-darwin-x64": ["@rollup/rollup-darwin-x64@4.55.2", "", { "os": "darwin", "cpu": "x64" }, "sha512-dP67MA0cCMHFT2g5XyjtpVOtp7y4UyUxN3dhLdt11at5cPKnSm4lY+EhwNvDXIMzAMIo2KU+mc9wxaAQJTn7sQ=="],
+
+ "@rollup/rollup-freebsd-arm64": ["@rollup/rollup-freebsd-arm64@4.55.2", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-WDUPLUwfYV9G1yxNRJdXcvISW15mpvod1Wv3ok+Ws93w1HjIVmCIFxsG2DquO+3usMNCpJQ0wqO+3GhFdl6Fow=="],
+
+ "@rollup/rollup-freebsd-x64": ["@rollup/rollup-freebsd-x64@4.55.2", "", { "os": "freebsd", "cpu": "x64" }, "sha512-Ng95wtHVEulRwn7R0tMrlUuiLVL/HXA8Lt/MYVpy88+s5ikpntzZba1qEulTuPnPIZuOPcW9wNEiqvZxZmgmqQ=="],
+
+ "@rollup/rollup-linux-arm-gnueabihf": ["@rollup/rollup-linux-arm-gnueabihf@4.55.2", "", { "os": "linux", "cpu": "arm" }, "sha512-AEXMESUDWWGqD6LwO/HkqCZgUE1VCJ1OhbvYGsfqX2Y6w5quSXuyoy/Fg3nRqiwro+cJYFxiw5v4kB2ZDLhxrw=="],
+
+ "@rollup/rollup-linux-arm-musleabihf": ["@rollup/rollup-linux-arm-musleabihf@4.55.2", "", { "os": "linux", "cpu": "arm" }, "sha512-ZV7EljjBDwBBBSv570VWj0hiNTdHt9uGznDtznBB4Caj3ch5rgD4I2K1GQrtbvJ/QiB+663lLgOdcADMNVC29Q=="],
+
+ "@rollup/rollup-linux-arm64-gnu": ["@rollup/rollup-linux-arm64-gnu@4.55.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-uvjwc8NtQVPAJtq4Tt7Q49FOodjfbf6NpqXyW/rjXoV+iZ3EJAHLNAnKT5UJBc6ffQVgmXTUL2ifYiLABlGFqA=="],
+
+ "@rollup/rollup-linux-arm64-musl": ["@rollup/rollup-linux-arm64-musl@4.55.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-s3KoWVNnye9mm/2WpOZ3JeUiediUVw6AvY/H7jNA6qgKA2V2aM25lMkVarTDfiicn/DLq3O0a81jncXszoyCFA=="],
+
+ "@rollup/rollup-linux-loong64-gnu": ["@rollup/rollup-linux-loong64-gnu@4.55.2", "", { "os": "linux", "cpu": "none" }, "sha512-gi21faacK+J8aVSyAUptML9VQN26JRxe484IbF+h3hpG+sNVoMXPduhREz2CcYr5my0NE3MjVvQ5bMKX71pfVA=="],
+
+ "@rollup/rollup-linux-loong64-musl": ["@rollup/rollup-linux-loong64-musl@4.55.2", "", { "os": "linux", "cpu": "none" }, "sha512-qSlWiXnVaS/ceqXNfnoFZh4IiCA0EwvCivivTGbEu1qv2o+WTHpn1zNmCTAoOG5QaVr2/yhCoLScQtc/7RxshA=="],
+
+ "@rollup/rollup-linux-ppc64-gnu": ["@rollup/rollup-linux-ppc64-gnu@4.55.2", "", { "os": "linux", "cpu": "ppc64" }, "sha512-rPyuLFNoF1B0+wolH277E780NUKf+KoEDb3OyoLbAO18BbeKi++YN6gC/zuJoPPDlQRL3fIxHxCxVEWiem2yXw=="],
+
+ "@rollup/rollup-linux-ppc64-musl": ["@rollup/rollup-linux-ppc64-musl@4.55.2", "", { "os": "linux", "cpu": "ppc64" }, "sha512-g+0ZLMook31iWV4PvqKU0i9E78gaZgYpSrYPed/4Bu+nGTgfOPtfs1h11tSSRPXSjC5EzLTjV/1A7L2Vr8pJoQ=="],
+
+ "@rollup/rollup-linux-riscv64-gnu": ["@rollup/rollup-linux-riscv64-gnu@4.55.2", "", { "os": "linux", "cpu": "none" }, "sha512-i+sGeRGsjKZcQRh3BRfpLsM3LX3bi4AoEVqmGDyc50L6KfYsN45wVCSz70iQMwPWr3E5opSiLOwsC9WB4/1pqg=="],
+
+ "@rollup/rollup-linux-riscv64-musl": ["@rollup/rollup-linux-riscv64-musl@4.55.2", "", { "os": "linux", "cpu": "none" }, "sha512-C1vLcKc4MfFV6I0aWsC7B2Y9QcsiEcvKkfxprwkPfLaN8hQf0/fKHwSF2lcYzA9g4imqnhic729VB9Fo70HO3Q=="],
+
+ "@rollup/rollup-linux-s390x-gnu": ["@rollup/rollup-linux-s390x-gnu@4.55.2", "", { "os": "linux", "cpu": "s390x" }, "sha512-68gHUK/howpQjh7g7hlD9DvTTt4sNLp1Bb+Yzw2Ki0xvscm2cOdCLZNJNhd2jW8lsTPrHAHuF751BygifW4bkQ=="],
+
+ "@rollup/rollup-linux-x64-gnu": ["@rollup/rollup-linux-x64-gnu@4.55.2", "", { "os": "linux", "cpu": "x64" }, "sha512-1e30XAuaBP1MAizaOBApsgeGZge2/Byd6wV4a8oa6jPdHELbRHBiw7wvo4dp7Ie2PE8TZT4pj9RLGZv9N4qwlw=="],
+
+ "@rollup/rollup-linux-x64-musl": ["@rollup/rollup-linux-x64-musl@4.55.2", "", { "os": "linux", "cpu": "x64" }, "sha512-4BJucJBGbuGnH6q7kpPqGJGzZnYrpAzRd60HQSt3OpX/6/YVgSsJnNzR8Ot74io50SeVT4CtCWe/RYIAymFPwA=="],
+
+ "@rollup/rollup-openbsd-x64": ["@rollup/rollup-openbsd-x64@4.55.2", "", { "os": "openbsd", "cpu": "x64" }, "sha512-cT2MmXySMo58ENv8p6/O6wI/h/gLnD3D6JoajwXFZH6X9jz4hARqUhWpGuQhOgLNXscfZYRQMJvZDtWNzMAIDw=="],
+
+ "@rollup/rollup-openharmony-arm64": ["@rollup/rollup-openharmony-arm64@4.55.2", "", { "os": "none", "cpu": "arm64" }, "sha512-sZnyUgGkuzIXaK3jNMPmUIyJrxu/PjmATQrocpGA1WbCPX8H5tfGgRSuYtqBYAvLuIGp8SPRb1O4d1Fkb5fXaQ=="],
+
+ "@rollup/rollup-win32-arm64-msvc": ["@rollup/rollup-win32-arm64-msvc@4.55.2", "", { "os": "win32", "cpu": "arm64" }, "sha512-sDpFbenhmWjNcEbBcoTV0PWvW5rPJFvu+P7XoTY0YLGRupgLbFY0XPfwIbJOObzO7QgkRDANh65RjhPmgSaAjQ=="],
+
+ "@rollup/rollup-win32-ia32-msvc": ["@rollup/rollup-win32-ia32-msvc@4.55.2", "", { "os": "win32", "cpu": "ia32" }, "sha512-GvJ03TqqaweWCigtKQVBErw2bEhu1tyfNQbarwr94wCGnczA9HF8wqEe3U/Lfu6EdeNP0p6R+APeHVwEqVxpUQ=="],
+
+ "@rollup/rollup-win32-x64-gnu": ["@rollup/rollup-win32-x64-gnu@4.55.2", "", { "os": "win32", "cpu": "x64" }, "sha512-KvXsBvp13oZz9JGe5NYS7FNizLe99Ny+W8ETsuCyjXiKdiGrcz2/J/N8qxZ/RSwivqjQguug07NLHqrIHrqfYw=="],
+
+ "@rollup/rollup-win32-x64-msvc": ["@rollup/rollup-win32-x64-msvc@4.55.2", "", { "os": "win32", "cpu": "x64" }, "sha512-xNO+fksQhsAckRtDSPWaMeT1uIM+JrDRXlerpnWNXhn1TdB3YZ6uKBMBTKP0eX9XtYEP978hHk1f8332i2AW8Q=="],
+
+ "@standard-schema/utils": ["@standard-schema/utils@0.3.0", "", {}, "sha512-e7Mew686owMaPJVNNLs55PUvgz371nKgwsc4vxE49zsODpJEnxgxRo2y/OKrqueavXgZNMDVj3DdHFlaSAeU8g=="],
+
+ "@swc/core": ["@swc/core@1.15.10", "", { "dependencies": { "@swc/counter": "^0.1.3", "@swc/types": "^0.1.25" }, "optionalDependencies": { "@swc/core-darwin-arm64": "1.15.10", "@swc/core-darwin-x64": "1.15.10", "@swc/core-linux-arm-gnueabihf": "1.15.10", "@swc/core-linux-arm64-gnu": "1.15.10", "@swc/core-linux-arm64-musl": "1.15.10", "@swc/core-linux-x64-gnu": "1.15.10", "@swc/core-linux-x64-musl": "1.15.10", "@swc/core-win32-arm64-msvc": "1.15.10", "@swc/core-win32-ia32-msvc": "1.15.10", "@swc/core-win32-x64-msvc": "1.15.10" }, "peerDependencies": { "@swc/helpers": ">=0.5.17" }, "optionalPeers": ["@swc/helpers"] }, "sha512-udNofxftduMUEv7nqahl2nvodCiCDQ4Ge0ebzsEm6P8s0RC2tBM0Hqx0nNF5J/6t9uagFJyWIDjXy3IIWMHDJw=="],
+
+ "@swc/core-darwin-arm64": ["@swc/core-darwin-arm64@1.15.10", "", { "os": "darwin", "cpu": "arm64" }, "sha512-U72pGqmJYbjrLhMndIemZ7u9Q9owcJczGxwtfJlz/WwMaGYAV/g4nkGiUVk/+QSX8sFCAjanovcU1IUsP2YulA=="],
+
+ "@swc/core-darwin-x64": ["@swc/core-darwin-x64@1.15.10", "", { "os": "darwin", "cpu": "x64" }, "sha512-NZpDXtwHH083L40xdyj1sY31MIwLgOxKfZEAGCI8xHXdHa+GWvEiVdGiu4qhkJctoHFzAEc7ZX3GN5phuJcPuQ=="],
+
+ "@swc/core-linux-arm-gnueabihf": ["@swc/core-linux-arm-gnueabihf@1.15.10", "", { "os": "linux", "cpu": "arm" }, "sha512-ioieF5iuRziUF1HkH1gg1r93e055dAdeBAPGAk40VjqpL5/igPJ/WxFHGvc6WMLhUubSJI4S0AiZAAhEAp1jDg=="],
+
+ "@swc/core-linux-arm64-gnu": ["@swc/core-linux-arm64-gnu@1.15.10", "", { "os": "linux", "cpu": "arm64" }, "sha512-tD6BClOrxSsNus9cJL7Gxdv7z7Y2hlyvZd9l0NQz+YXzmTWqnfzLpg16ovEI7gknH2AgDBB5ywOsqu8hUgSeEQ=="],
+
+ "@swc/core-linux-arm64-musl": ["@swc/core-linux-arm64-musl@1.15.10", "", { "os": "linux", "cpu": "arm64" }, "sha512-4uAHO3nbfbrTcmO/9YcVweTQdx5fN3l7ewwl5AEK4yoC4wXmoBTEPHAVdKNe4r9+xrTgd4BgyPsy0409OjjlMw=="],
+
+ "@swc/core-linux-x64-gnu": ["@swc/core-linux-x64-gnu@1.15.10", "", { "os": "linux", "cpu": "x64" }, "sha512-W0h9ONNw1pVIA0cN7wtboOSTl4Jk3tHq+w2cMPQudu9/+3xoCxpFb9ZdehwCAk29IsvdWzGzY6P7dDVTyFwoqg=="],
+
+ "@swc/core-linux-x64-musl": ["@swc/core-linux-x64-musl@1.15.10", "", { "os": "linux", "cpu": "x64" }, "sha512-XQNZlLZB62S8nAbw7pqoqwy91Ldy2RpaMRqdRN3T+tAg6Xg6FywXRKCsLh6IQOadr4p1+lGnqM/Wn35z5a/0Vw=="],
+
+ "@swc/core-win32-arm64-msvc": ["@swc/core-win32-arm64-msvc@1.15.10", "", { "os": "win32", "cpu": "arm64" }, "sha512-qnAGrRv5Nj/DATxAmCnJQRXXQqnJwR0trxLndhoHoxGci9MuguNIjWahS0gw8YZFjgTinbTxOwzatkoySihnmw=="],
+
+ "@swc/core-win32-ia32-msvc": ["@swc/core-win32-ia32-msvc@1.15.10", "", { "os": "win32", "cpu": "ia32" }, "sha512-i4X/q8QSvzVlaRtv1xfnfl+hVKpCfiJ+9th484rh937fiEZKxZGf51C+uO0lfKDP1FfnT6C1yBYwHy7FLBVXFw=="],
+
+ "@swc/core-win32-x64-msvc": ["@swc/core-win32-x64-msvc@1.15.10", "", { "os": "win32", "cpu": "x64" }, "sha512-HvY8XUFuoTXn6lSccDLYFlXv1SU/PzYi4PyUqGT++WfTnbw/68N/7BdUZqglGRwiSqr0qhYt/EhmBpULj0J9rA=="],
+
+ "@swc/counter": ["@swc/counter@0.1.3", "", {}, "sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ=="],
+
+ "@swc/types": ["@swc/types@0.1.25", "", { "dependencies": { "@swc/counter": "^0.1.3" } }, "sha512-iAoY/qRhNH8a/hBvm3zKj9qQ4oc2+3w1unPJa2XvTK3XjeLXtzcCingVPw/9e5mn1+0yPqxcBGp9Jf0pkfMb1g=="],
+
+ "@tailwindcss/node": ["@tailwindcss/node@4.1.18", "", { "dependencies": { "@jridgewell/remapping": "^2.3.4", "enhanced-resolve": "^5.18.3", "jiti": "^2.6.1", "lightningcss": "1.30.2", "magic-string": "^0.30.21", "source-map-js": "^1.2.1", "tailwindcss": "4.1.18" } }, "sha512-DoR7U1P7iYhw16qJ49fgXUlry1t4CpXeErJHnQ44JgTSKMaZUdf17cfn5mHchfJ4KRBZRFA/Coo+MUF5+gOaCQ=="],
+
+ "@tailwindcss/oxide": ["@tailwindcss/oxide@4.1.18", "", { "optionalDependencies": { "@tailwindcss/oxide-android-arm64": "4.1.18", "@tailwindcss/oxide-darwin-arm64": "4.1.18", "@tailwindcss/oxide-darwin-x64": "4.1.18", "@tailwindcss/oxide-freebsd-x64": "4.1.18", "@tailwindcss/oxide-linux-arm-gnueabihf": "4.1.18", "@tailwindcss/oxide-linux-arm64-gnu": "4.1.18", "@tailwindcss/oxide-linux-arm64-musl": "4.1.18", "@tailwindcss/oxide-linux-x64-gnu": "4.1.18", "@tailwindcss/oxide-linux-x64-musl": "4.1.18", "@tailwindcss/oxide-wasm32-wasi": "4.1.18", "@tailwindcss/oxide-win32-arm64-msvc": "4.1.18", "@tailwindcss/oxide-win32-x64-msvc": "4.1.18" } }, "sha512-EgCR5tTS5bUSKQgzeMClT6iCY3ToqE1y+ZB0AKldj809QXk1Y+3jB0upOYZrn9aGIzPtUsP7sX4QQ4XtjBB95A=="],
+
+ "@tailwindcss/oxide-android-arm64": ["@tailwindcss/oxide-android-arm64@4.1.18", "", { "os": "android", "cpu": "arm64" }, "sha512-dJHz7+Ugr9U/diKJA0W6N/6/cjI+ZTAoxPf9Iz9BFRF2GzEX8IvXxFIi/dZBloVJX/MZGvRuFA9rqwdiIEZQ0Q=="],
+
+ "@tailwindcss/oxide-darwin-arm64": ["@tailwindcss/oxide-darwin-arm64@4.1.18", "", { "os": "darwin", "cpu": "arm64" }, "sha512-Gc2q4Qhs660bhjyBSKgq6BYvwDz4G+BuyJ5H1xfhmDR3D8HnHCmT/BSkvSL0vQLy/nkMLY20PQ2OoYMO15Jd0A=="],
+
+ "@tailwindcss/oxide-darwin-x64": ["@tailwindcss/oxide-darwin-x64@4.1.18", "", { "os": "darwin", "cpu": "x64" }, "sha512-FL5oxr2xQsFrc3X9o1fjHKBYBMD1QZNyc1Xzw/h5Qu4XnEBi3dZn96HcHm41c/euGV+GRiXFfh2hUCyKi/e+yw=="],
+
+ "@tailwindcss/oxide-freebsd-x64": ["@tailwindcss/oxide-freebsd-x64@4.1.18", "", { "os": "freebsd", "cpu": "x64" }, "sha512-Fj+RHgu5bDodmV1dM9yAxlfJwkkWvLiRjbhuO2LEtwtlYlBgiAT4x/j5wQr1tC3SANAgD+0YcmWVrj8R9trVMA=="],
+
+ "@tailwindcss/oxide-linux-arm-gnueabihf": ["@tailwindcss/oxide-linux-arm-gnueabihf@4.1.18", "", { "os": "linux", "cpu": "arm" }, "sha512-Fp+Wzk/Ws4dZn+LV2Nqx3IilnhH51YZoRaYHQsVq3RQvEl+71VGKFpkfHrLM/Li+kt5c0DJe/bHXK1eHgDmdiA=="],
+
+ "@tailwindcss/oxide-linux-arm64-gnu": ["@tailwindcss/oxide-linux-arm64-gnu@4.1.18", "", { "os": "linux", "cpu": "arm64" }, "sha512-S0n3jboLysNbh55Vrt7pk9wgpyTTPD0fdQeh7wQfMqLPM/Hrxi+dVsLsPrycQjGKEQk85Kgbx+6+QnYNiHalnw=="],
+
+ "@tailwindcss/oxide-linux-arm64-musl": ["@tailwindcss/oxide-linux-arm64-musl@4.1.18", "", { "os": "linux", "cpu": "arm64" }, "sha512-1px92582HkPQlaaCkdRcio71p8bc8i/ap5807tPRDK/uw953cauQBT8c5tVGkOwrHMfc2Yh6UuxaH4vtTjGvHg=="],
+
+ "@tailwindcss/oxide-linux-x64-gnu": ["@tailwindcss/oxide-linux-x64-gnu@4.1.18", "", { "os": "linux", "cpu": "x64" }, "sha512-v3gyT0ivkfBLoZGF9LyHmts0Isc8jHZyVcbzio6Wpzifg/+5ZJpDiRiUhDLkcr7f/r38SWNe7ucxmGW3j3Kb/g=="],
+
+ "@tailwindcss/oxide-linux-x64-musl": ["@tailwindcss/oxide-linux-x64-musl@4.1.18", "", { "os": "linux", "cpu": "x64" }, "sha512-bhJ2y2OQNlcRwwgOAGMY0xTFStt4/wyU6pvI6LSuZpRgKQwxTec0/3Scu91O8ir7qCR3AuepQKLU/kX99FouqQ=="],
+
+ "@tailwindcss/oxide-wasm32-wasi": ["@tailwindcss/oxide-wasm32-wasi@4.1.18", "", { "cpu": "none" }, "sha512-LffYTvPjODiP6PT16oNeUQJzNVyJl1cjIebq/rWWBF+3eDst5JGEFSc5cWxyRCJ0Mxl+KyIkqRxk1XPEs9x8TA=="],
+
+ "@tailwindcss/oxide-win32-arm64-msvc": ["@tailwindcss/oxide-win32-arm64-msvc@4.1.18", "", { "os": "win32", "cpu": "arm64" }, "sha512-HjSA7mr9HmC8fu6bdsZvZ+dhjyGCLdotjVOgLA2vEqxEBZaQo9YTX4kwgEvPCpRh8o4uWc4J/wEoFzhEmjvPbA=="],
+
+ "@tailwindcss/oxide-win32-x64-msvc": ["@tailwindcss/oxide-win32-x64-msvc@4.1.18", "", { "os": "win32", "cpu": "x64" }, "sha512-bJWbyYpUlqamC8dpR7pfjA0I7vdF6t5VpUGMWRkXVE3AXgIZjYUYAK7II1GNaxR8J1SSrSrppRar8G++JekE3Q=="],
+
+ "@tailwindcss/vite": ["@tailwindcss/vite@4.1.18", "", { "dependencies": { "@tailwindcss/node": "4.1.18", "@tailwindcss/oxide": "4.1.18", "tailwindcss": "4.1.18" }, "peerDependencies": { "vite": "^5.2.0 || ^6 || ^7" } }, "sha512-jVA+/UpKL1vRLg6Hkao5jldawNmRo7mQYrZtNHMIVpLfLhDml5nMRUo/8MwoX2vNXvnaXNNMedrMfMugAVX1nA=="],
+
+ "@tanstack/history": ["@tanstack/history@1.154.14", "", {}, "sha512-xyIfof8eHBuub1CkBnbKNKQXeRZC4dClhmzePHVOEel4G7lk/dW+TQ16da7CFdeNLv6u6Owf5VoBQxoo6DFTSA=="],
+
+ "@tanstack/query-core": ["@tanstack/query-core@5.90.19", "", {}, "sha512-GLW5sjPVIvH491VV1ufddnfldyVB+teCnpPIvweEfkpRx7CfUmUGhoh9cdcUKBh/KwVxk22aNEDxeTsvmyB/WA=="],
+
+ "@tanstack/query-devtools": ["@tanstack/query-devtools@5.92.0", "", {}, "sha512-N8D27KH1vEpVacvZgJL27xC6yPFUy0Zkezn5gnB3L3gRCxlDeSuiya7fKge8Y91uMTnC8aSxBQhcK6ocY7alpQ=="],
+
+ "@tanstack/react-query": ["@tanstack/react-query@5.90.19", "", { "dependencies": { "@tanstack/query-core": "5.90.19" }, "peerDependencies": { "react": "^18 || ^19" } }, "sha512-qTZRZ4QyTzQc+M0IzrbKHxSeISUmRB3RPGmao5bT+sI6ayxSRhn0FXEnT5Hg3as8SBFcRosrXXRFB+yAcxVxJQ=="],
+
+ "@tanstack/react-query-devtools": ["@tanstack/react-query-devtools@5.91.2", "", { "dependencies": { "@tanstack/query-devtools": "5.92.0" }, "peerDependencies": { "@tanstack/react-query": "^5.90.14", "react": "^18 || ^19" } }, "sha512-ZJ1503ay5fFeEYFUdo7LMNFzZryi6B0Cacrgr2h1JRkvikK1khgIq6Nq2EcblqEdIlgB/r7XDW8f8DQ89RuUgg=="],
+
+ "@tanstack/react-router": ["@tanstack/react-router@1.157.3", "", { "dependencies": { "@tanstack/history": "1.154.14", "@tanstack/react-store": "^0.8.0", "@tanstack/router-core": "1.157.3", "isbot": "^5.1.22", "tiny-invariant": "^1.3.3", "tiny-warning": "^1.0.3" }, "peerDependencies": { "react": ">=18.0.0 || >=19.0.0", "react-dom": ">=18.0.0 || >=19.0.0" } }, "sha512-nrshpAAtYRWnvJeTwItA8WwDr5oX5zOvxxcFEWIdsscLHkKsK9ED9byV4d8VfCRey+W02blBxsCKpppJfq2rnQ=="],
+
+ "@tanstack/react-router-devtools": ["@tanstack/react-router-devtools@1.157.17", "", { "dependencies": { "@tanstack/router-devtools-core": "1.157.16" }, "peerDependencies": { "@tanstack/react-router": "^1.157.17", "@tanstack/router-core": "^1.157.16", "react": ">=18.0.0 || >=19.0.0", "react-dom": ">=18.0.0 || >=19.0.0" }, "optionalPeers": ["@tanstack/router-core"] }, "sha512-ajhTEQMPK9XtgVN7KqLy9JobYbyjcbuZXc76kABA8HeUJqB98rvwdpVuB106LReeIKuTc5RLOgCrdkq2A19wpg=="],
+
+ "@tanstack/react-store": ["@tanstack/react-store@0.8.0", "", { "dependencies": { "@tanstack/store": "0.8.0", "use-sync-external-store": "^1.6.0" }, "peerDependencies": { "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, "sha512-1vG9beLIuB7q69skxK9r5xiLN3ztzIPfSQSs0GfeqWGO2tGIyInZx0x1COhpx97RKaONSoAb8C3dxacWksm1ow=="],
+
+ "@tanstack/react-table": ["@tanstack/react-table@8.21.3", "", { "dependencies": { "@tanstack/table-core": "8.21.3" }, "peerDependencies": { "react": ">=16.8", "react-dom": ">=16.8" } }, "sha512-5nNMTSETP4ykGegmVkhjcS8tTLW6Vl4axfEGQN3v0zdHYbK4UfoqfPChclTrJ4EoK9QynqAu9oUf8VEmrpZ5Ww=="],
+
+ "@tanstack/router-core": ["@tanstack/router-core@1.153.2", "", { "dependencies": { "@tanstack/history": "1.153.2", "@tanstack/store": "^0.8.0", "cookie-es": "^2.0.0", "seroval": "^1.4.1", "seroval-plugins": "^1.4.0", "tiny-invariant": "^1.3.3", "tiny-warning": "^1.0.3" } }, "sha512-WLaR+rSNW7bj9UCJQ3SKpuh6nZBZkpGnf2mpjn/uRB6joIQ3BU7aRdhb7w9Via/MP52iaHh5sd8NY3MaLpF2tQ=="],
+
+ "@tanstack/router-devtools": ["@tanstack/router-devtools@1.157.17", "", { "dependencies": { "@tanstack/react-router-devtools": "1.157.17", "clsx": "^2.1.1", "goober": "^2.1.16" }, "peerDependencies": { "@tanstack/react-router": "^1.157.17", "csstype": "^3.0.10", "react": ">=18.0.0 || >=19.0.0", "react-dom": ">=18.0.0 || >=19.0.0" }, "optionalPeers": ["csstype"] }, "sha512-9yZxi5Gjn6SSB645gR1wGhZb5uz+8LD3D78hTGmc2XKZ7enRxz7ToEHPTwIBCdkXYmlguONLifh1DgvcgP++yg=="],
+
+ "@tanstack/router-devtools-core": ["@tanstack/router-devtools-core@1.157.16", "", { "dependencies": { "clsx": "^2.1.1", "goober": "^2.1.16", "tiny-invariant": "^1.3.3" }, "peerDependencies": { "@tanstack/router-core": "^1.157.16", "csstype": "^3.0.10" }, "optionalPeers": ["csstype"] }, "sha512-XBJTs/kMZYK6J2zhbGucHNuypwDB1t2vi8K5To+V6dUnLGBEyfQTf01fegiF4rpL1yXgomdGnP6aTiOFgldbVg=="],
+
+ "@tanstack/router-generator": ["@tanstack/router-generator@1.153.2", "", { "dependencies": { "@tanstack/router-core": "1.153.2", "@tanstack/router-utils": "1.143.11", "@tanstack/virtual-file-routes": "1.145.4", "prettier": "^3.5.0", "recast": "^0.23.11", "source-map": "^0.7.4", "tsx": "^4.19.2", "zod": "^3.24.2" } }, "sha512-bEhmCtXq5vv3HukKq5zmTDBNDRqVllYxsHoWtqEvHv5hCb5xwKKfUMGemRoiQ96/wLFuGnA5DYkem2GZWcG3wg=="],
+
+ "@tanstack/router-plugin": ["@tanstack/router-plugin@1.153.2", "", { "dependencies": { "@babel/core": "^7.28.5", "@babel/plugin-syntax-jsx": "^7.27.1", "@babel/plugin-syntax-typescript": "^7.27.1", "@babel/template": "^7.27.2", "@babel/traverse": "^7.28.5", "@babel/types": "^7.28.5", "@tanstack/router-core": "1.153.2", "@tanstack/router-generator": "1.153.2", "@tanstack/router-utils": "1.143.11", "@tanstack/virtual-file-routes": "1.145.4", "babel-dead-code-elimination": "^1.0.11", "chokidar": "^3.6.0", "unplugin": "^2.1.2", "zod": "^3.24.2" }, "peerDependencies": { "@rsbuild/core": ">=1.0.2", "@tanstack/react-router": "^1.153.2", "vite": ">=5.0.0 || >=6.0.0 || >=7.0.0", "vite-plugin-solid": "^2.11.10", "webpack": ">=5.92.0" }, "optionalPeers": ["@rsbuild/core", "vite-plugin-solid", "webpack"] }, "sha512-aMMc70ChM0wBYOToq39kTMKI2A0EKWpumiKTJyAwEglXf0raF48+26Fmv0gr9/5CLvD0g8ljllsskVDyzg8oDw=="],
+
+ "@tanstack/router-utils": ["@tanstack/router-utils@1.143.11", "", { "dependencies": { "@babel/core": "^7.28.5", "@babel/generator": "^7.28.5", "@babel/parser": "^7.28.5", "ansis": "^4.1.0", "diff": "^8.0.2", "pathe": "^2.0.3", "tinyglobby": "^0.2.15" } }, "sha512-N24G4LpfyK8dOlnP8BvNdkuxg1xQljkyl6PcrdiPSA301pOjatRT1y8wuCCJZKVVD8gkd0MpCZ0VEjRMGILOtA=="],
+
+ "@tanstack/store": ["@tanstack/store@0.8.0", "", {}, "sha512-Om+BO0YfMZe//X2z0uLF2j+75nQga6TpTJgLJQBiq85aOyZNIhkCgleNcud2KQg4k4v9Y9l+Uhru3qWMPGTOzQ=="],
+
+ "@tanstack/table-core": ["@tanstack/table-core@8.21.3", "", {}, "sha512-ldZXEhOBb8Is7xLs01fR3YEc3DERiz5silj8tnGkFZytt1abEvl/GhUmCE0PMLaMPTa3Jk4HbKmRlHmu+gCftg=="],
+
+ "@tanstack/virtual-file-routes": ["@tanstack/virtual-file-routes@1.145.4", "", {}, "sha512-CI75JrfqSluhdGwLssgVeQBaCphgfkMQpi8MCY3UJX1hoGzXa8kHYJcUuIFMOLs1q7zqHy++EVVtMK03osR5wQ=="],
+
+ "@types/estree": ["@types/estree@1.0.8", "", {}, "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w=="],
+
+ "@types/json-schema": ["@types/json-schema@7.0.15", "", {}, "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA=="],
+
+ "@types/node": ["@types/node@25.1.0", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-t7frlewr6+cbx+9Ohpl0NOTKXZNV9xHRmNOvql47BFJKcEG1CxtxlPEEe+gR9uhVWM4DwhnvTF110mIL4yP9RA=="],
+
+ "@types/react": ["@types/react@19.2.9", "", { "dependencies": { "csstype": "^3.2.2" } }, "sha512-Lpo8kgb/igvMIPeNV2rsYKTgaORYdO1XGVZ4Qz3akwOj0ySGYMPlQWa8BaLn0G63D1aSaAQ5ldR06wCpChQCjA=="],
+
+ "@types/react-dom": ["@types/react-dom@19.2.3", "", { "peerDependencies": { "@types/react": "^19.2.0" } }, "sha512-jp2L/eY6fn+KgVVQAOqYItbF0VY/YApe5Mz2F0aykSO8gx31bYCZyvSeYxCHKvzHG5eZjc+zyaS5BrBWya2+kQ=="],
+
+ "@vitejs/plugin-react-swc": ["@vitejs/plugin-react-swc@4.2.2", "", { "dependencies": { "@rolldown/pluginutils": "1.0.0-beta.47", "@swc/core": "^1.13.5" }, "peerDependencies": { "vite": "^4 || ^5 || ^6 || ^7" } }, "sha512-x+rE6tsxq/gxrEJN3Nv3dIV60lFflPj94c90b+NNo6n1QV1QQUTLoL0MpaOVasUZ0zqVBn7ead1B5ecx1JAGfA=="],
+
+ "acorn": ["acorn@8.15.0", "", { "bin": "bin/acorn" }, "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg=="],
+
+ "ansi-colors": ["ansi-colors@4.1.3", "", {}, "sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw=="],
+
+ "ansis": ["ansis@4.2.0", "", {}, "sha512-HqZ5rWlFjGiV0tDm3UxxgNRqsOTniqoKZu0pIAfh7TZQMGuZK+hH0drySty0si0QXj1ieop4+SkSfPZBPPkHig=="],
+
+ "anymatch": ["anymatch@3.1.3", "", { "dependencies": { "normalize-path": "^3.0.0", "picomatch": "^2.0.4" } }, "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw=="],
+
+ "argparse": ["argparse@2.0.1", "", {}, "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="],
+
+ "aria-hidden": ["aria-hidden@1.2.6", "", { "dependencies": { "tslib": "^2.0.0" } }, "sha512-ik3ZgC9dY/lYVVM++OISsaYDeg1tb0VtP5uL3ouh1koGOaUMDPpbFIei4JkFimWUFPn90sbMNMXQAIVOlnYKJA=="],
+
+ "ast-types": ["ast-types@0.16.1", "", { "dependencies": { "tslib": "^2.0.1" } }, "sha512-6t10qk83GOG8p0vKmaCr8eiilZwO171AvbROMtvvNiwrTly62t+7XkA8RdIIVbpMhCASAsxgAzdRSwh6nw/5Dg=="],
+
+ "asynckit": ["asynckit@0.4.0", "", {}, "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="],
+
+ "axios": ["axios@1.13.4", "", { "dependencies": { "follow-redirects": "^1.15.6", "form-data": "^4.0.4", "proxy-from-env": "^1.1.0" } }, "sha512-1wVkUaAO6WyaYtCkcYCOx12ZgpGf9Zif+qXa4n+oYzK558YryKqiL6UWwd5DqiH3VRW0GYhTZQ/vlgJrCoNQlg=="],
+
+ "babel-dead-code-elimination": ["babel-dead-code-elimination@1.0.12", "", { "dependencies": { "@babel/core": "^7.23.7", "@babel/parser": "^7.23.6", "@babel/traverse": "^7.23.7", "@babel/types": "^7.23.6" } }, "sha512-GERT7L2TiYcYDtYk1IpD+ASAYXjKbLTDPhBtYj7X1NuRMDTMtAx9kyBenub1Ev41lo91OHCKdmP+egTDmfQ7Ig=="],
+
+ "baseline-browser-mapping": ["baseline-browser-mapping@2.9.16", "", { "bin": "dist/cli.js" }, "sha512-KeUZdBuxngy825i8xvzaK1Ncnkx0tBmb3k8DkEuqjKRkmtvNTjey2ZsNeh8Dw4lfKvbCOu9oeNx2TKm2vHqcRw=="],
+
+ "binary-extensions": ["binary-extensions@2.3.0", "", {}, "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw=="],
+
+ "braces": ["braces@3.0.3", "", { "dependencies": { "fill-range": "^7.1.1" } }, "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA=="],
+
+ "browserslist": ["browserslist@4.28.1", "", { "dependencies": { "baseline-browser-mapping": "^2.9.0", "caniuse-lite": "^1.0.30001759", "electron-to-chromium": "^1.5.263", "node-releases": "^2.0.27", "update-browserslist-db": "^1.2.0" }, "bin": "cli.js" }, "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA=="],
+
+ "bundle-name": ["bundle-name@4.1.0", "", { "dependencies": { "run-applescript": "^7.0.0" } }, "sha512-tjwM5exMg6BGRI+kNmTntNsvdZS1X8BFYS6tnJ2hdH0kVxM6/eVZ2xy+FqStSWvYmtfFMDLIxurorHwDKfDz5Q=="],
+
+ "c12": ["c12@2.0.1", "", { "dependencies": { "chokidar": "^4.0.1", "confbox": "^0.1.7", "defu": "^6.1.4", "dotenv": "^16.4.5", "giget": "^1.2.3", "jiti": "^2.3.0", "mlly": "^1.7.1", "ohash": "^1.1.4", "pathe": "^1.1.2", "perfect-debounce": "^1.0.0", "pkg-types": "^1.2.0", "rc9": "^2.1.2" }, "peerDependencies": { "magicast": "^0.3.5" }, "optionalPeers": ["magicast"] }, "sha512-Z4JgsKXHG37C6PYUtIxCfLJZvo6FyhHJoClwwb9ftUkLpPSkuYqn6Tr+vnaN8hymm0kIbcg6Ey3kv/Q71k5w/A=="],
+
+ "call-bind-apply-helpers": ["call-bind-apply-helpers@1.0.2", "", { "dependencies": { "es-errors": "^1.3.0", "function-bind": "^1.1.2" } }, "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ=="],
+
+ "caniuse-lite": ["caniuse-lite@1.0.30001765", "", {}, "sha512-LWcNtSyZrakjECqmpP4qdg0MMGdN368D7X8XvvAqOcqMv0RxnlqVKZl2V6/mBR68oYMxOZPLw/gO7DuisMHUvQ=="],
+
+ "chokidar": ["chokidar@3.6.0", "", { "dependencies": { "anymatch": "~3.1.2", "braces": "~3.0.2", "glob-parent": "~5.1.2", "is-binary-path": "~2.1.0", "is-glob": "~4.0.1", "normalize-path": "~3.0.0", "readdirp": "~3.6.0" }, "optionalDependencies": { "fsevents": "~2.3.2" } }, "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw=="],
+
+ "chownr": ["chownr@2.0.0", "", {}, "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ=="],
+
+ "citty": ["citty@0.1.6", "", { "dependencies": { "consola": "^3.2.3" } }, "sha512-tskPPKEs8D2KPafUypv2gxwJP8h/OaJmC82QQGGDQcHvXX43xF2VDACcJVmZ0EuSxkpO9Kc4MlrA3q0+FG58AQ=="],
+
+ "class-variance-authority": ["class-variance-authority@0.7.1", "", { "dependencies": { "clsx": "^2.1.1" } }, "sha512-Ka+9Trutv7G8M6WT6SeiRWz792K5qEqIGEGzXKhAE6xOWAY6pPH8U+9IY3oCMv6kqTmLsv7Xh/2w2RigkePMsg=="],
+
+ "clsx": ["clsx@2.1.1", "", {}, "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA=="],
+
+ "color-support": ["color-support@1.1.3", "", { "bin": "bin.js" }, "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg=="],
+
+ "combined-stream": ["combined-stream@1.0.8", "", { "dependencies": { "delayed-stream": "~1.0.0" } }, "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg=="],
+
+ "commander": ["commander@13.0.0", "", {}, "sha512-oPYleIY8wmTVzkvQq10AEok6YcTC4sRUBl8F9gVuwchGVUCTbl/vhLTaQqutuuySYOsu8YTgV+OxKc/8Yvx+mQ=="],
+
+ "confbox": ["confbox@0.1.8", "", {}, "sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w=="],
+
+ "consola": ["consola@3.4.2", "", {}, "sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA=="],
+
+ "convert-source-map": ["convert-source-map@2.0.0", "", {}, "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg=="],
+
+ "cookie-es": ["cookie-es@2.0.0", "", {}, "sha512-RAj4E421UYRgqokKUmotqAwuplYw15qtdXfY+hGzgCJ/MBjCVZcSoHK/kH9kocfjRjcDME7IiDWR/1WX1TM2Pg=="],
+
+ "csstype": ["csstype@3.2.3", "", {}, "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ=="],
+
+ "debug": ["debug@4.4.3", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA=="],
+
+ "default-browser": ["default-browser@5.4.0", "", { "dependencies": { "bundle-name": "^4.1.0", "default-browser-id": "^5.0.0" } }, "sha512-XDuvSq38Hr1MdN47EDvYtx3U0MTqpCEn+F6ft8z2vYDzMrvQhVp0ui9oQdqW3MvK3vqUETglt1tVGgjLuJ5izg=="],
+
+ "default-browser-id": ["default-browser-id@5.0.1", "", {}, "sha512-x1VCxdX4t+8wVfd1so/9w+vQ4vx7lKd2Qp5tDRutErwmR85OgmfX7RlLRMWafRMY7hbEiXIbudNrjOAPa/hL8Q=="],
+
+ "define-lazy-prop": ["define-lazy-prop@3.0.0", "", {}, "sha512-N+MeXYoqr3pOgn8xfyRPREN7gHakLYjhsHhWGT3fWAiL4IkAt0iDw14QiiEm2bE30c5XX5q0FtAA3CK5f9/BUg=="],
+
+ "defu": ["defu@6.1.4", "", {}, "sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg=="],
+
+ "delayed-stream": ["delayed-stream@1.0.0", "", {}, "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="],
+
+ "destr": ["destr@2.0.5", "", {}, "sha512-ugFTXCtDZunbzasqBxrK93Ik/DRYsO6S/fedkWEMKqt04xZ4csmnmwGDBAb07QWNaGMAmnTIemsYZCksjATwsA=="],
+
+ "detect-libc": ["detect-libc@2.1.2", "", {}, "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="],
+
+ "detect-node-es": ["detect-node-es@1.1.0", "", {}, "sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ=="],
+
+ "diff": ["diff@8.0.3", "", {}, "sha512-qejHi7bcSD4hQAZE0tNAawRK1ZtafHDmMTMkrrIGgSLl7hTnQHmKCeB45xAcbfTqK2zowkM3j3bHt/4b/ARbYQ=="],
+
+ "dotenv": ["dotenv@17.2.3", "", {}, "sha512-JVUnt+DUIzu87TABbhPmNfVdBDt18BLOWjMUFJMSi/Qqg7NTYtabbvSNJGOJ7afbRuv9D/lngizHtP7QyLQ+9w=="],
+
+ "dunder-proto": ["dunder-proto@1.0.1", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "es-errors": "^1.3.0", "gopd": "^1.2.0" } }, "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A=="],
+
+ "electron-to-chromium": ["electron-to-chromium@1.5.267", "", {}, "sha512-0Drusm6MVRXSOJpGbaSVgcQsuB4hEkMpHXaVstcPmhu5LIedxs1xNK/nIxmQIU/RPC0+1/o0AVZfBTkTNJOdUw=="],
+
+ "enhanced-resolve": ["enhanced-resolve@5.18.4", "", { "dependencies": { "graceful-fs": "^4.2.4", "tapable": "^2.2.0" } }, "sha512-LgQMM4WXU3QI+SYgEc2liRgznaD5ojbmY3sb8LxyguVkIg5FxdpTkvk72te2R38/TGKxH634oLxXRGY6d7AP+Q=="],
+
+ "es-define-property": ["es-define-property@1.0.1", "", {}, "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g=="],
+
+ "es-errors": ["es-errors@1.3.0", "", {}, "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw=="],
+
+ "es-object-atoms": ["es-object-atoms@1.1.1", "", { "dependencies": { "es-errors": "^1.3.0" } }, "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA=="],
+
+ "es-set-tostringtag": ["es-set-tostringtag@2.1.0", "", { "dependencies": { "es-errors": "^1.3.0", "get-intrinsic": "^1.2.6", "has-tostringtag": "^1.0.2", "hasown": "^2.0.2" } }, "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA=="],
+
+ "esbuild": ["esbuild@0.27.2", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.27.2", "@esbuild/android-arm": "0.27.2", "@esbuild/android-arm64": "0.27.2", "@esbuild/android-x64": "0.27.2", "@esbuild/darwin-arm64": "0.27.2", "@esbuild/darwin-x64": "0.27.2", "@esbuild/freebsd-arm64": "0.27.2", "@esbuild/freebsd-x64": "0.27.2", "@esbuild/linux-arm": "0.27.2", "@esbuild/linux-arm64": "0.27.2", "@esbuild/linux-ia32": "0.27.2", "@esbuild/linux-loong64": "0.27.2", "@esbuild/linux-mips64el": "0.27.2", "@esbuild/linux-ppc64": "0.27.2", "@esbuild/linux-riscv64": "0.27.2", "@esbuild/linux-s390x": "0.27.2", "@esbuild/linux-x64": "0.27.2", "@esbuild/netbsd-arm64": "0.27.2", "@esbuild/netbsd-x64": "0.27.2", "@esbuild/openbsd-arm64": "0.27.2", "@esbuild/openbsd-x64": "0.27.2", "@esbuild/openharmony-arm64": "0.27.2", "@esbuild/sunos-x64": "0.27.2", "@esbuild/win32-arm64": "0.27.2", "@esbuild/win32-ia32": "0.27.2", "@esbuild/win32-x64": "0.27.2" }, "bin": "bin/esbuild" }, "sha512-HyNQImnsOC7X9PMNaCIeAm4ISCQXs5a5YasTXVliKv4uuBo1dKrG0A+uQS8M5eXjVMnLg3WgXaKvprHlFJQffw=="],
+
+ "escalade": ["escalade@3.2.0", "", {}, "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA=="],
+
+ "esprima": ["esprima@4.0.1", "", { "bin": { "esparse": "bin/esparse.js", "esvalidate": "bin/esvalidate.js" } }, "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A=="],
+
+ "fdir": ["fdir@6.5.0", "", { "peerDependencies": { "picomatch": "^3 || ^4" } }, "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg=="],
+
+ "fill-range": ["fill-range@7.1.1", "", { "dependencies": { "to-regex-range": "^5.0.1" } }, "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg=="],
+
+ "follow-redirects": ["follow-redirects@1.15.11", "", {}, "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ=="],
+
+ "form-data": ["form-data@4.0.5", "", { "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", "es-set-tostringtag": "^2.1.0", "hasown": "^2.0.2", "mime-types": "^2.1.12" } }, "sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w=="],
+
+ "frontend": ["frontend@workspace:frontend"],
+
+ "fs-minipass": ["fs-minipass@2.1.0", "", { "dependencies": { "minipass": "^3.0.0" } }, "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg=="],
+
+ "fsevents": ["fsevents@2.3.3", "", { "os": "darwin" }, "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw=="],
+
+ "function-bind": ["function-bind@1.1.2", "", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="],
+
+ "gensync": ["gensync@1.0.0-beta.2", "", {}, "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg=="],
+
+ "get-intrinsic": ["get-intrinsic@1.3.0", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.2", "es-define-property": "^1.0.1", "es-errors": "^1.3.0", "es-object-atoms": "^1.1.1", "function-bind": "^1.1.2", "get-proto": "^1.0.1", "gopd": "^1.2.0", "has-symbols": "^1.1.0", "hasown": "^2.0.2", "math-intrinsics": "^1.1.0" } }, "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ=="],
+
+ "get-nonce": ["get-nonce@1.0.1", "", {}, "sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q=="],
+
+ "get-proto": ["get-proto@1.0.1", "", { "dependencies": { "dunder-proto": "^1.0.1", "es-object-atoms": "^1.0.0" } }, "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g=="],
+
+ "get-tsconfig": ["get-tsconfig@4.13.0", "", { "dependencies": { "resolve-pkg-maps": "^1.0.0" } }, "sha512-1VKTZJCwBrvbd+Wn3AOgQP/2Av+TfTCOlE4AcRJE72W1ksZXbAx8PPBR9RzgTeSPzlPMHrbANMH3LbltH73wxQ=="],
+
+ "giget": ["giget@1.2.5", "", { "dependencies": { "citty": "^0.1.6", "consola": "^3.4.0", "defu": "^6.1.4", "node-fetch-native": "^1.6.6", "nypm": "^0.5.4", "pathe": "^2.0.3", "tar": "^6.2.1" }, "bin": "dist/cli.mjs" }, "sha512-r1ekGw/Bgpi3HLV3h1MRBIlSAdHoIMklpaQ3OQLFcRw9PwAj2rqigvIbg+dBUI51OxVI2jsEtDywDBjSiuf7Ug=="],
+
+ "glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="],
+
+ "goober": ["goober@2.1.18", "", { "peerDependencies": { "csstype": "^3.0.10" } }, "sha512-2vFqsaDVIT9Gz7N6kAL++pLpp41l3PfDuusHcjnGLfR6+huZkl6ziX+zgVC3ZxpqWhzH6pyDdGrCeDhMIvwaxw=="],
+
+ "gopd": ["gopd@1.2.0", "", {}, "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg=="],
+
+ "graceful-fs": ["graceful-fs@4.2.11", "", {}, "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="],
+
+ "handlebars": ["handlebars@4.7.8", "", { "dependencies": { "minimist": "^1.2.5", "neo-async": "^2.6.2", "source-map": "^0.6.1", "wordwrap": "^1.0.0" }, "optionalDependencies": { "uglify-js": "^3.1.4" }, "bin": "bin/handlebars" }, "sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ=="],
+
+ "has-symbols": ["has-symbols@1.1.0", "", {}, "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ=="],
+
+ "has-tostringtag": ["has-tostringtag@1.0.2", "", { "dependencies": { "has-symbols": "^1.0.3" } }, "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw=="],
+
+ "hasown": ["hasown@2.0.2", "", { "dependencies": { "function-bind": "^1.1.2" } }, "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ=="],
+
+ "is-binary-path": ["is-binary-path@2.1.0", "", { "dependencies": { "binary-extensions": "^2.0.0" } }, "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw=="],
+
+ "is-docker": ["is-docker@3.0.0", "", { "bin": "cli.js" }, "sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ=="],
+
+ "is-extglob": ["is-extglob@2.1.1", "", {}, "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ=="],
+
+ "is-glob": ["is-glob@4.0.3", "", { "dependencies": { "is-extglob": "^2.1.1" } }, "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg=="],
+
+ "is-inside-container": ["is-inside-container@1.0.0", "", { "dependencies": { "is-docker": "^3.0.0" }, "bin": "cli.js" }, "sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA=="],
+
+ "is-number": ["is-number@7.0.0", "", {}, "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng=="],
+
+ "is-wsl": ["is-wsl@3.1.0", "", { "dependencies": { "is-inside-container": "^1.0.0" } }, "sha512-UcVfVfaK4Sc4m7X3dUSoHoozQGBEFeDC+zVo06t98xe8CzHSZZBekNXH+tu0NalHolcJ/QAGqS46Hef7QXBIMw=="],
+
+ "isbot": ["isbot@5.1.33", "", {}, "sha512-P4Hgb5NqswjkI0J1CM6XKXon/sxKY1SuowE7Qx2hrBhIwICFyXy54mfgB5eMHXsbe/eStzzpbIGNOvGmz+dlKg=="],
+
+ "jiti": ["jiti@2.6.1", "", { "bin": "lib/jiti-cli.mjs" }, "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ=="],
+
+ "js-tokens": ["js-tokens@4.0.0", "", {}, "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="],
+
+ "js-yaml": ["js-yaml@4.1.1", "", { "dependencies": { "argparse": "^2.0.1" }, "bin": "bin/js-yaml.js" }, "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA=="],
+
+ "jsesc": ["jsesc@3.1.0", "", { "bin": "bin/jsesc" }, "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA=="],
+
+ "json5": ["json5@2.2.3", "", { "bin": "lib/cli.js" }, "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg=="],
+
+ "lightningcss": ["lightningcss@1.30.2", "", { "dependencies": { "detect-libc": "^2.0.3" }, "optionalDependencies": { "lightningcss-android-arm64": "1.30.2", "lightningcss-darwin-arm64": "1.30.2", "lightningcss-darwin-x64": "1.30.2", "lightningcss-freebsd-x64": "1.30.2", "lightningcss-linux-arm-gnueabihf": "1.30.2", "lightningcss-linux-arm64-gnu": "1.30.2", "lightningcss-linux-arm64-musl": "1.30.2", "lightningcss-linux-x64-gnu": "1.30.2", "lightningcss-linux-x64-musl": "1.30.2", "lightningcss-win32-arm64-msvc": "1.30.2", "lightningcss-win32-x64-msvc": "1.30.2" } }, "sha512-utfs7Pr5uJyyvDETitgsaqSyjCb2qNRAtuqUeWIAKztsOYdcACf2KtARYXg2pSvhkt+9NfoaNY7fxjl6nuMjIQ=="],
+
+ "lightningcss-android-arm64": ["lightningcss-android-arm64@1.30.2", "", { "os": "android", "cpu": "arm64" }, "sha512-BH9sEdOCahSgmkVhBLeU7Hc9DWeZ1Eb6wNS6Da8igvUwAe0sqROHddIlvU06q3WyXVEOYDZ6ykBZQnjTbmo4+A=="],
+
+ "lightningcss-darwin-arm64": ["lightningcss-darwin-arm64@1.30.2", "", { "os": "darwin", "cpu": "arm64" }, "sha512-ylTcDJBN3Hp21TdhRT5zBOIi73P6/W0qwvlFEk22fkdXchtNTOU4Qc37SkzV+EKYxLouZ6M4LG9NfZ1qkhhBWA=="],
+
+ "lightningcss-darwin-x64": ["lightningcss-darwin-x64@1.30.2", "", { "os": "darwin", "cpu": "x64" }, "sha512-oBZgKchomuDYxr7ilwLcyms6BCyLn0z8J0+ZZmfpjwg9fRVZIR5/GMXd7r9RH94iDhld3UmSjBM6nXWM2TfZTQ=="],
+
+ "lightningcss-freebsd-x64": ["lightningcss-freebsd-x64@1.30.2", "", { "os": "freebsd", "cpu": "x64" }, "sha512-c2bH6xTrf4BDpK8MoGG4Bd6zAMZDAXS569UxCAGcA7IKbHNMlhGQ89eRmvpIUGfKWNVdbhSbkQaWhEoMGmGslA=="],
+
+ "lightningcss-linux-arm-gnueabihf": ["lightningcss-linux-arm-gnueabihf@1.30.2", "", { "os": "linux", "cpu": "arm" }, "sha512-eVdpxh4wYcm0PofJIZVuYuLiqBIakQ9uFZmipf6LF/HRj5Bgm0eb3qL/mr1smyXIS1twwOxNWndd8z0E374hiA=="],
+
+ "lightningcss-linux-arm64-gnu": ["lightningcss-linux-arm64-gnu@1.30.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-UK65WJAbwIJbiBFXpxrbTNArtfuznvxAJw4Q2ZGlU8kPeDIWEX1dg3rn2veBVUylA2Ezg89ktszWbaQnxD/e3A=="],
+
+ "lightningcss-linux-arm64-musl": ["lightningcss-linux-arm64-musl@1.30.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-5Vh9dGeblpTxWHpOx8iauV02popZDsCYMPIgiuw97OJ5uaDsL86cnqSFs5LZkG3ghHoX5isLgWzMs+eD1YzrnA=="],
+
+ "lightningcss-linux-x64-gnu": ["lightningcss-linux-x64-gnu@1.30.2", "", { "os": "linux", "cpu": "x64" }, "sha512-Cfd46gdmj1vQ+lR6VRTTadNHu6ALuw2pKR9lYq4FnhvgBc4zWY1EtZcAc6EffShbb1MFrIPfLDXD6Xprbnni4w=="],
+
+ "lightningcss-linux-x64-musl": ["lightningcss-linux-x64-musl@1.30.2", "", { "os": "linux", "cpu": "x64" }, "sha512-XJaLUUFXb6/QG2lGIW6aIk6jKdtjtcffUT0NKvIqhSBY3hh9Ch+1LCeH80dR9q9LBjG3ewbDjnumefsLsP6aiA=="],
+
+ "lightningcss-win32-arm64-msvc": ["lightningcss-win32-arm64-msvc@1.30.2", "", { "os": "win32", "cpu": "arm64" }, "sha512-FZn+vaj7zLv//D/192WFFVA0RgHawIcHqLX9xuWiQt7P0PtdFEVaxgF9rjM/IRYHQXNnk61/H/gb2Ei+kUQ4xQ=="],
+
+ "lightningcss-win32-x64-msvc": ["lightningcss-win32-x64-msvc@1.30.2", "", { "os": "win32", "cpu": "x64" }, "sha512-5g1yc73p+iAkid5phb4oVFMB45417DkRevRbt/El/gKXJk4jid+vPFF/AXbxn05Aky8PapwzZrdJShv5C0avjw=="],
+
+ "lodash": ["lodash@4.17.21", "", {}, "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="],
+
+ "lru-cache": ["lru-cache@5.1.1", "", { "dependencies": { "yallist": "^3.0.2" } }, "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w=="],
+
+ "lucide-react": ["lucide-react@0.562.0", "", { "peerDependencies": { "react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, "sha512-82hOAu7y0dbVuFfmO4bYF1XEwYk/mEbM5E+b1jgci/udUBEE/R7LF5Ip0CCEmXe8AybRM8L+04eP+LGZeDvkiw=="],
+
+ "magic-string": ["magic-string@0.30.21", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.5" } }, "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ=="],
+
+ "math-intrinsics": ["math-intrinsics@1.1.0", "", {}, "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g=="],
+
+ "mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="],
+
+ "mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="],
+
+ "minimist": ["minimist@1.2.8", "", {}, "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA=="],
+
+ "minipass": ["minipass@5.0.0", "", {}, "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ=="],
+
+ "minizlib": ["minizlib@2.1.2", "", { "dependencies": { "minipass": "^3.0.0", "yallist": "^4.0.0" } }, "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg=="],
+
+ "mkdirp": ["mkdirp@1.0.4", "", { "bin": "bin/cmd.js" }, "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw=="],
+
+ "mlly": ["mlly@1.8.0", "", { "dependencies": { "acorn": "^8.15.0", "pathe": "^2.0.3", "pkg-types": "^1.3.1", "ufo": "^1.6.1" } }, "sha512-l8D9ODSRWLe2KHJSifWGwBqpTZXIXTeo8mlKjY+E2HAakaTeNpqAyBZ8GSqLzHgw4XmHmC8whvpjJNMbFZN7/g=="],
+
+ "ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
+
+ "nanoid": ["nanoid@3.3.11", "", { "bin": "bin/nanoid.cjs" }, "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w=="],
+
+ "neo-async": ["neo-async@2.6.2", "", {}, "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw=="],
+
+ "next-themes": ["next-themes@0.4.6", "", { "peerDependencies": { "react": "^16.8 || ^17 || ^18 || ^19 || ^19.0.0-rc", "react-dom": "^16.8 || ^17 || ^18 || ^19 || ^19.0.0-rc" } }, "sha512-pZvgD5L0IEvX5/9GWyHMf3m8BKiVQwsCMHfoFosXtXBMnaS0ZnIJ9ST4b4NqLVKDEm8QBxoNNGNaBv2JNF6XNA=="],
+
+ "node-fetch-native": ["node-fetch-native@1.6.7", "", {}, "sha512-g9yhqoedzIUm0nTnTqAQvueMPVOuIY16bqgAJJC8XOOubYFNwz6IER9qs0Gq2Xd0+CecCKFjtdDTMA4u4xG06Q=="],
+
+ "node-releases": ["node-releases@2.0.27", "", {}, "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA=="],
+
+ "normalize-path": ["normalize-path@3.0.0", "", {}, "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA=="],
+
+ "nypm": ["nypm@0.5.4", "", { "dependencies": { "citty": "^0.1.6", "consola": "^3.4.0", "pathe": "^2.0.3", "pkg-types": "^1.3.1", "tinyexec": "^0.3.2", "ufo": "^1.5.4" }, "bin": "dist/cli.mjs" }, "sha512-X0SNNrZiGU8/e/zAB7sCTtdxWTMSIO73q+xuKgglm2Yvzwlo8UoC5FNySQFCvl84uPaeADkqHUZUkWy4aH4xOA=="],
+
+ "ohash": ["ohash@1.1.6", "", {}, "sha512-TBu7PtV8YkAZn0tSxobKY2n2aAQva936lhRrj6957aDaCf9IEtqsKbgMzXE/F/sjqYOwmrukeORHNLe5glk7Cg=="],
+
+ "open": ["open@10.1.2", "", { "dependencies": { "default-browser": "^5.2.1", "define-lazy-prop": "^3.0.0", "is-inside-container": "^1.0.0", "is-wsl": "^3.1.0" } }, "sha512-cxN6aIDPz6rm8hbebcP7vrQNhvRcveZoJU72Y7vskh4oIm+BZwBECnx5nTmrlres1Qapvx27Qo1Auukpf8PKXw=="],
+
+ "pathe": ["pathe@1.1.2", "", {}, "sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ=="],
+
+ "perfect-debounce": ["perfect-debounce@1.0.0", "", {}, "sha512-xCy9V055GLEqoFaHoC1SoLIaLmWctgCUaBaWxDZ7/Zx4CTyX7cJQLJOok/orfjZAh9kEYpjJa4d0KcJmCbctZA=="],
+
+ "picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="],
+
+ "picomatch": ["picomatch@4.0.3", "", {}, "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q=="],
+
+ "pkg-types": ["pkg-types@1.3.1", "", { "dependencies": { "confbox": "^0.1.8", "mlly": "^1.7.4", "pathe": "^2.0.1" } }, "sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ=="],
+
+ "playwright": ["playwright@1.58.0", "", { "dependencies": { "playwright-core": "1.58.0" }, "optionalDependencies": { "fsevents": "2.3.2" }, "bin": { "playwright": "cli.js" } }, "sha512-2SVA0sbPktiIY/MCOPX8e86ehA/e+tDNq+e5Y8qjKYti2Z/JG7xnronT/TXTIkKbYGWlCbuucZ6dziEgkoEjQQ=="],
+
+ "playwright-core": ["playwright-core@1.58.0", "", { "bin": { "playwright-core": "cli.js" } }, "sha512-aaoB1RWrdNi3//rOeKuMiS65UCcgOVljU46At6eFcOFPFHWtd2weHRRow6z/n+Lec0Lvu0k9ZPKJSjPugikirw=="],
+
+ "postcss": ["postcss@8.5.6", "", { "dependencies": { "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" } }, "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg=="],
+
+ "prettier": ["prettier@3.8.0", "", { "bin": "bin/prettier.cjs" }, "sha512-yEPsovQfpxYfgWNhCfECjG5AQaO+K3dp6XERmOepyPDVqcJm+bjyCVO3pmU+nAPe0N5dDvekfGezt/EIiRe1TA=="],
+
+ "proxy-from-env": ["proxy-from-env@1.1.0", "", {}, "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="],
+
+ "rc9": ["rc9@2.1.2", "", { "dependencies": { "defu": "^6.1.4", "destr": "^2.0.3" } }, "sha512-btXCnMmRIBINM2LDZoEmOogIZU7Qe7zn4BpomSKZ/ykbLObuBdvG+mFq11DL6fjH1DRwHhrlgtYWG96bJiC7Cg=="],
+
+ "react": ["react@19.2.3", "", {}, "sha512-Ku/hhYbVjOQnXDZFv2+RibmLFGwFdeeKHFcOTlrt7xplBnya5OGn/hIRDsqDiSUcfORsDC7MPxwork8jBwsIWA=="],
+
+ "react-dom": ["react-dom@19.2.3", "", { "dependencies": { "scheduler": "^0.27.0" }, "peerDependencies": { "react": "^19.2.3" } }, "sha512-yELu4WmLPw5Mr/lmeEpox5rw3RETacE++JgHqQzd2dg+YbJuat3jH4ingc+WPZhxaoFzdv9y33G+F7Nl5O0GBg=="],
+
+ "react-error-boundary": ["react-error-boundary@6.1.0", "", { "peerDependencies": { "react": "^18.0.0 || ^19.0.0" } }, "sha512-02k9WQ/mUhdbXir0tC1NiMesGzRPaCsJEWU/4bcFrbY1YMZOtHShtZP6zw0SJrBWA/31H0KT9/FgdL8+sPKgHA=="],
+
+ "react-hook-form": ["react-hook-form@7.71.1", "", { "peerDependencies": { "react": "^16.8.0 || ^17 || ^18 || ^19" } }, "sha512-9SUJKCGKo8HUSsCO+y0CtqkqI5nNuaDqTxyqPsZPqIwudpj4rCrAz/jZV+jn57bx5gtZKOh3neQu94DXMc+w5w=="],
+
+ "react-icons": ["react-icons@5.5.0", "", { "peerDependencies": { "react": "*" } }, "sha512-MEFcXdkP3dLo8uumGI5xN3lDFNsRtrjbOEKDLD7yv76v4wpnEq2Lt2qeHaQOr34I/wPN3s3+N08WkQ+CW37Xiw=="],
+
+ "react-remove-scroll": ["react-remove-scroll@2.7.2", "", { "dependencies": { "react-remove-scroll-bar": "^2.3.7", "react-style-singleton": "^2.2.3", "tslib": "^2.1.0", "use-callback-ref": "^1.3.3", "use-sidecar": "^1.1.3" }, "peerDependencies": { "@types/react": "*", "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" } }, "sha512-Iqb9NjCCTt6Hf+vOdNIZGdTiH1QSqr27H/Ek9sv/a97gfueI/5h1s3yRi1nngzMUaOOToin5dI1dXKdXiF+u0Q=="],
+
+ "react-remove-scroll-bar": ["react-remove-scroll-bar@2.3.8", "", { "dependencies": { "react-style-singleton": "^2.2.2", "tslib": "^2.0.0" }, "peerDependencies": { "@types/react": "*", "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, "sha512-9r+yi9+mgU33AKcj6IbT9oRCO78WriSj6t/cF8DWBZJ9aOGPOTEDvdUDz1FwKim7QXWwmHqtdHnRJfhAxEG46Q=="],
+
+ "react-style-singleton": ["react-style-singleton@2.2.3", "", { "dependencies": { "get-nonce": "^1.0.0", "tslib": "^2.0.0" }, "peerDependencies": { "@types/react": "*", "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" } }, "sha512-b6jSvxvVnyptAiLjbkWLE/lOnR4lfTtDAl+eUC7RZy+QQWc6wRzIV2CE6xBuMmDxc2qIihtDCZD5NPOFl7fRBQ=="],
+
+ "readdirp": ["readdirp@3.6.0", "", { "dependencies": { "picomatch": "^2.2.1" } }, "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA=="],
+
+ "recast": ["recast@0.23.11", "", { "dependencies": { "ast-types": "^0.16.1", "esprima": "~4.0.0", "source-map": "~0.6.1", "tiny-invariant": "^1.3.3", "tslib": "^2.0.1" } }, "sha512-YTUo+Flmw4ZXiWfQKGcwwc11KnoRAYgzAE2E7mXKCjSviTKShtxBsN6YUUBB2gtaBzKzeKunxhUwNHQuRryhWA=="],
+
+ "resolve-pkg-maps": ["resolve-pkg-maps@1.0.0", "", {}, "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw=="],
+
+ "rollup": ["rollup@4.55.2", "", { "dependencies": { "@types/estree": "1.0.8" }, "optionalDependencies": { "@rollup/rollup-android-arm-eabi": "4.55.2", "@rollup/rollup-android-arm64": "4.55.2", "@rollup/rollup-darwin-arm64": "4.55.2", "@rollup/rollup-darwin-x64": "4.55.2", "@rollup/rollup-freebsd-arm64": "4.55.2", "@rollup/rollup-freebsd-x64": "4.55.2", "@rollup/rollup-linux-arm-gnueabihf": "4.55.2", "@rollup/rollup-linux-arm-musleabihf": "4.55.2", "@rollup/rollup-linux-arm64-gnu": "4.55.2", "@rollup/rollup-linux-arm64-musl": "4.55.2", "@rollup/rollup-linux-loong64-gnu": "4.55.2", "@rollup/rollup-linux-loong64-musl": "4.55.2", "@rollup/rollup-linux-ppc64-gnu": "4.55.2", "@rollup/rollup-linux-ppc64-musl": "4.55.2", "@rollup/rollup-linux-riscv64-gnu": "4.55.2", "@rollup/rollup-linux-riscv64-musl": "4.55.2", "@rollup/rollup-linux-s390x-gnu": "4.55.2", "@rollup/rollup-linux-x64-gnu": "4.55.2", "@rollup/rollup-linux-x64-musl": "4.55.2", "@rollup/rollup-openbsd-x64": "4.55.2", "@rollup/rollup-openharmony-arm64": "4.55.2", "@rollup/rollup-win32-arm64-msvc": "4.55.2", "@rollup/rollup-win32-ia32-msvc": "4.55.2", "@rollup/rollup-win32-x64-gnu": "4.55.2", "@rollup/rollup-win32-x64-msvc": "4.55.2", "fsevents": "~2.3.2" }, "bin": "dist/bin/rollup" }, "sha512-PggGy4dhwx5qaW+CKBilA/98Ql9keyfnb7lh4SR6shQ91QQQi1ORJ1v4UinkdP2i87OBs9AQFooQylcrrRfIcg=="],
+
+ "run-applescript": ["run-applescript@7.1.0", "", {}, "sha512-DPe5pVFaAsinSaV6QjQ6gdiedWDcRCbUuiQfQa2wmWV7+xC9bGulGI8+TdRmoFkAPaBXk8CrAbnlY2ISniJ47Q=="],
+
+ "scheduler": ["scheduler@0.27.0", "", {}, "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q=="],
+
+ "semver": ["semver@6.3.1", "", { "bin": "bin/semver.js" }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="],
+
+ "seroval": ["seroval@1.4.2", "", {}, "sha512-N3HEHRCZYn3cQbsC4B5ldj9j+tHdf4JZoYPlcI4rRYu0Xy4qN8MQf1Z08EibzB0WpgRG5BGK08FTrmM66eSzKQ=="],
+
+ "seroval-plugins": ["seroval-plugins@1.4.2", "", { "peerDependencies": { "seroval": "^1.0" } }, "sha512-X7p4MEDTi+60o2sXZ4bnDBhgsUYDSkQEvzYZuJyFqWg9jcoPsHts5nrg5O956py2wyt28lUrBxk0M0/wU8URpA=="],
+
+ "sonner": ["sonner@2.0.7", "", { "peerDependencies": { "react": "^18.0.0 || ^19.0.0 || ^19.0.0-rc", "react-dom": "^18.0.0 || ^19.0.0 || ^19.0.0-rc" } }, "sha512-W6ZN4p58k8aDKA4XPcx2hpIQXBRAgyiWVkYhT7CvK6D3iAu7xjvVyhQHg2/iaKJZ1XVJ4r7XuwGL+WGEK37i9w=="],
+
+ "source-map": ["source-map@0.6.1", "", {}, "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="],
+
+ "source-map-js": ["source-map-js@1.2.1", "", {}, "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA=="],
+
+ "tailwind-merge": ["tailwind-merge@3.4.0", "", {}, "sha512-uSaO4gnW+b3Y2aWoWfFpX62vn2sR3skfhbjsEnaBI81WD1wBLlHZe5sWf0AqjksNdYTbGBEd0UasQMT3SNV15g=="],
+
+ "tailwindcss": ["tailwindcss@4.1.18", "", {}, "sha512-4+Z+0yiYyEtUVCScyfHCxOYP06L5Ne+JiHhY2IjR2KWMIWhJOYZKLSGZaP5HkZ8+bY0cxfzwDE5uOmzFXyIwxw=="],
+
+ "tapable": ["tapable@2.3.0", "", {}, "sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg=="],
+
+ "tar": ["tar@6.2.1", "", { "dependencies": { "chownr": "^2.0.0", "fs-minipass": "^2.0.0", "minipass": "^5.0.0", "minizlib": "^2.1.1", "mkdirp": "^1.0.3", "yallist": "^4.0.0" } }, "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A=="],
+
+ "tiny-invariant": ["tiny-invariant@1.3.3", "", {}, "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg=="],
+
+ "tiny-warning": ["tiny-warning@1.0.3", "", {}, "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA=="],
+
+ "tinyexec": ["tinyexec@0.3.2", "", {}, "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA=="],
+
+ "tinyglobby": ["tinyglobby@0.2.15", "", { "dependencies": { "fdir": "^6.5.0", "picomatch": "^4.0.3" } }, "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ=="],
+
+ "to-regex-range": ["to-regex-range@5.0.1", "", { "dependencies": { "is-number": "^7.0.0" } }, "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="],
+
+ "tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
+
+ "tsx": ["tsx@4.21.0", "", { "dependencies": { "esbuild": "~0.27.0", "get-tsconfig": "^4.7.5" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "bin": "dist/cli.mjs" }, "sha512-5C1sg4USs1lfG0GFb2RLXsdpXqBSEhAaA/0kPL01wxzpMqLILNxIxIOKiILz+cdg/pLnOUxFYOR5yhHU666wbw=="],
+
+ "tw-animate-css": ["tw-animate-css@1.4.0", "", {}, "sha512-7bziOlRqH0hJx80h/3mbicLW7o8qLsH5+RaLR2t+OHM3D0JlWGODQKQ4cxbK7WlvmUxpcj6Kgu6EKqjrGFe3QQ=="],
+
+ "typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="],
+
+ "ufo": ["ufo@1.6.3", "", {}, "sha512-yDJTmhydvl5lJzBmy/hyOAA0d+aqCBuwl818haVdYCRrWV84o7YyeVm4QlVHStqNrrJSTb6jKuFAVqAFsr+K3Q=="],
+
+ "uglify-js": ["uglify-js@3.19.3", "", { "bin": { "uglifyjs": "bin/uglifyjs" } }, "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ=="],
+
+ "undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="],
+
+ "unplugin": ["unplugin@2.3.11", "", { "dependencies": { "@jridgewell/remapping": "^2.3.5", "acorn": "^8.15.0", "picomatch": "^4.0.3", "webpack-virtual-modules": "^0.6.2" } }, "sha512-5uKD0nqiYVzlmCRs01Fhs2BdkEgBS3SAVP6ndrBsuK42iC2+JHyxM05Rm9G8+5mkmRtzMZGY8Ct5+mliZxU/Ww=="],
+
+ "update-browserslist-db": ["update-browserslist-db@1.2.3", "", { "dependencies": { "escalade": "^3.2.0", "picocolors": "^1.1.1" }, "peerDependencies": { "browserslist": ">= 4.21.0" }, "bin": "cli.js" }, "sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w=="],
+
+ "use-callback-ref": ["use-callback-ref@1.3.3", "", { "dependencies": { "tslib": "^2.0.0" }, "peerDependencies": { "@types/react": "*", "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" } }, "sha512-jQL3lRnocaFtu3V00JToYz/4QkNWswxijDaCVNZRiRTO3HQDLsdu1ZtmIUvV4yPp+rvWm5j0y0TG/S61cuijTg=="],
+
+ "use-sidecar": ["use-sidecar@1.1.3", "", { "dependencies": { "detect-node-es": "^1.1.0", "tslib": "^2.0.0" }, "peerDependencies": { "@types/react": "*", "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" } }, "sha512-Fedw0aZvkhynoPYlA5WXrMCAMm+nSWdZt6lzJQ7Ok8S6Q+VsHmHpRWndVRJ8Be0ZbkfPc5LRYH+5XrzXcEeLRQ=="],
+
+ "use-sync-external-store": ["use-sync-external-store@1.6.0", "", { "peerDependencies": { "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, "sha512-Pp6GSwGP/NrPIrxVFAIkOQeyw8lFenOHijQWkUTrDvrF4ALqylP2C/KCkeS9dpUM3KvYRQhna5vt7IL95+ZQ9w=="],
+
+ "vite": ["vite@7.3.1", "", { "dependencies": { "esbuild": "^0.27.0", "fdir": "^6.5.0", "picomatch": "^4.0.3", "postcss": "^8.5.6", "rollup": "^4.43.0", "tinyglobby": "^0.2.15" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^20.19.0 || >=22.12.0", "jiti": ">=1.21.0", "less": "^4.0.0", "lightningcss": "^1.21.0", "sass": "^1.70.0", "sass-embedded": "^1.70.0", "stylus": ">=0.54.8", "sugarss": "^5.0.0", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["less", "sass", "sass-embedded", "stylus", "sugarss", "terser", "yaml"], "bin": "bin/vite.js" }, "sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA=="],
+
+ "webpack-virtual-modules": ["webpack-virtual-modules@0.6.2", "", {}, "sha512-66/V2i5hQanC51vBQKPH4aI8NMAcBW59FVBs+rC7eGHupMyfn34q7rZIE+ETlJ+XTevqfUhVVBgSUNSW2flEUQ=="],
+
+ "wordwrap": ["wordwrap@1.0.0", "", {}, "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q=="],
+
+ "yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="],
+
+ "zod": ["zod@4.3.6", "", {}, "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg=="],
+
+ "@radix-ui/react-arrow/@radix-ui/react-primitive": ["@radix-ui/react-primitive@2.1.3", "", { "dependencies": { "@radix-ui/react-slot": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ=="],
+
+ "@radix-ui/react-checkbox/@radix-ui/react-context": ["@radix-ui/react-context@1.1.2", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA=="],
+
+ "@radix-ui/react-checkbox/@radix-ui/react-primitive": ["@radix-ui/react-primitive@2.1.3", "", { "dependencies": { "@radix-ui/react-slot": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ=="],
+
+ "@radix-ui/react-collection/@radix-ui/react-context": ["@radix-ui/react-context@1.1.2", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA=="],
+
+ "@radix-ui/react-collection/@radix-ui/react-primitive": ["@radix-ui/react-primitive@2.1.3", "", { "dependencies": { "@radix-ui/react-slot": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ=="],
+
+ "@radix-ui/react-collection/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="],
+
+ "@radix-ui/react-dialog/@radix-ui/react-context": ["@radix-ui/react-context@1.1.2", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA=="],
+
+ "@radix-ui/react-dialog/@radix-ui/react-primitive": ["@radix-ui/react-primitive@2.1.3", "", { "dependencies": { "@radix-ui/react-slot": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ=="],
+
+ "@radix-ui/react-dialog/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="],
+
+ "@radix-ui/react-dismissable-layer/@radix-ui/react-primitive": ["@radix-ui/react-primitive@2.1.3", "", { "dependencies": { "@radix-ui/react-slot": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ=="],
+
+ "@radix-ui/react-dropdown-menu/@radix-ui/react-context": ["@radix-ui/react-context@1.1.2", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA=="],
+
+ "@radix-ui/react-dropdown-menu/@radix-ui/react-primitive": ["@radix-ui/react-primitive@2.1.3", "", { "dependencies": { "@radix-ui/react-slot": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ=="],
+
+ "@radix-ui/react-focus-scope/@radix-ui/react-primitive": ["@radix-ui/react-primitive@2.1.3", "", { "dependencies": { "@radix-ui/react-slot": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ=="],
+
+ "@radix-ui/react-menu/@radix-ui/react-context": ["@radix-ui/react-context@1.1.2", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA=="],
+
+ "@radix-ui/react-menu/@radix-ui/react-primitive": ["@radix-ui/react-primitive@2.1.3", "", { "dependencies": { "@radix-ui/react-slot": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ=="],
+
+ "@radix-ui/react-menu/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="],
+
+ "@radix-ui/react-popper/@radix-ui/react-context": ["@radix-ui/react-context@1.1.2", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA=="],
+
+ "@radix-ui/react-popper/@radix-ui/react-primitive": ["@radix-ui/react-primitive@2.1.3", "", { "dependencies": { "@radix-ui/react-slot": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ=="],
+
+ "@radix-ui/react-portal/@radix-ui/react-primitive": ["@radix-ui/react-primitive@2.1.3", "", { "dependencies": { "@radix-ui/react-slot": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ=="],
+
+ "@radix-ui/react-radio-group/@radix-ui/react-context": ["@radix-ui/react-context@1.1.2", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA=="],
+
+ "@radix-ui/react-radio-group/@radix-ui/react-primitive": ["@radix-ui/react-primitive@2.1.3", "", { "dependencies": { "@radix-ui/react-slot": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ=="],
+
+ "@radix-ui/react-roving-focus/@radix-ui/react-context": ["@radix-ui/react-context@1.1.2", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA=="],
+
+ "@radix-ui/react-roving-focus/@radix-ui/react-primitive": ["@radix-ui/react-primitive@2.1.3", "", { "dependencies": { "@radix-ui/react-slot": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ=="],
+
+ "@radix-ui/react-scroll-area/@radix-ui/react-context": ["@radix-ui/react-context@1.1.2", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA=="],
+
+ "@radix-ui/react-scroll-area/@radix-ui/react-primitive": ["@radix-ui/react-primitive@2.1.3", "", { "dependencies": { "@radix-ui/react-slot": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ=="],
+
+ "@radix-ui/react-select/@radix-ui/react-context": ["@radix-ui/react-context@1.1.2", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA=="],
+
+ "@radix-ui/react-select/@radix-ui/react-primitive": ["@radix-ui/react-primitive@2.1.3", "", { "dependencies": { "@radix-ui/react-slot": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ=="],
+
+ "@radix-ui/react-select/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="],
+
+ "@radix-ui/react-tabs/@radix-ui/react-context": ["@radix-ui/react-context@1.1.2", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA=="],
+
+ "@radix-ui/react-tabs/@radix-ui/react-primitive": ["@radix-ui/react-primitive@2.1.3", "", { "dependencies": { "@radix-ui/react-slot": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ=="],
+
+ "@radix-ui/react-tooltip/@radix-ui/react-context": ["@radix-ui/react-context@1.1.2", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA=="],
+
+ "@radix-ui/react-tooltip/@radix-ui/react-primitive": ["@radix-ui/react-primitive@2.1.3", "", { "dependencies": { "@radix-ui/react-slot": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ=="],
+
+ "@radix-ui/react-tooltip/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="],
+
+ "@radix-ui/react-visually-hidden/@radix-ui/react-primitive": ["@radix-ui/react-primitive@2.1.3", "", { "dependencies": { "@radix-ui/react-slot": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ=="],
+
+ "@tanstack/react-router/@tanstack/router-core": ["@tanstack/router-core@1.157.3", "", { "dependencies": { "@tanstack/history": "1.154.14", "@tanstack/store": "^0.8.0", "cookie-es": "^2.0.0", "seroval": "^1.4.2", "seroval-plugins": "^1.4.2", "tiny-invariant": "^1.3.3", "tiny-warning": "^1.0.3" } }, "sha512-r2KY/UWC4Ocxx05G7b/tLNQ7ZGX7URvA5H5P1cNbkFmi77VbOgtbW0sfz9/+9Dyh6aqHVK/Bx5kuR5jojNvrHQ=="],
+
+ "@tanstack/router-core/@tanstack/history": ["@tanstack/history@1.153.2", "", {}, "sha512-TVa0Wju5w6JZGq/S74Q7TQNtKXDatJaB4NYrhMZVU9ETlkgpr35NhDfOzsCJ93P0KCo1ZoDodlFp3c54/dLsyw=="],
+
+ "@tanstack/router-devtools-core/@tanstack/router-core": ["@tanstack/router-core@1.157.3", "", { "dependencies": { "@tanstack/history": "1.154.14", "@tanstack/store": "^0.8.0", "cookie-es": "^2.0.0", "seroval": "^1.4.2", "seroval-plugins": "^1.4.2", "tiny-invariant": "^1.3.3", "tiny-warning": "^1.0.3" } }, "sha512-r2KY/UWC4Ocxx05G7b/tLNQ7ZGX7URvA5H5P1cNbkFmi77VbOgtbW0sfz9/+9Dyh6aqHVK/Bx5kuR5jojNvrHQ=="],
+
+ "@tanstack/router-generator/source-map": ["source-map@0.7.6", "", {}, "sha512-i5uvt8C3ikiWeNZSVZNWcfZPItFQOsYTUAOkcUPGd8DqDy1uOUikjt5dG+uRlwyvR108Fb9DOd4GvXfT0N2/uQ=="],
+
+ "@tanstack/router-generator/zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="],
+
+ "@tanstack/router-plugin/zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="],
+
+ "@tanstack/router-utils/pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="],
+
+ "anymatch/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="],
+
+ "c12/chokidar": ["chokidar@4.0.3", "", { "dependencies": { "readdirp": "^4.0.1" } }, "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA=="],
+
+ "c12/dotenv": ["dotenv@16.6.1", "", {}, "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow=="],
+
+ "fs-minipass/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="],
+
+ "giget/pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="],
+
+ "lru-cache/yallist": ["yallist@3.1.1", "", {}, "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g=="],
+
+ "minizlib/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="],
+
+ "mlly/pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="],
+
+ "nypm/pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="],
+
+ "pkg-types/pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="],
+
+ "playwright/fsevents": ["fsevents@2.3.2", "", { "os": "darwin" }, "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA=="],
+
+ "readdirp/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="],
+
+ "@radix-ui/react-arrow/@radix-ui/react-primitive/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="],
+
+ "@radix-ui/react-checkbox/@radix-ui/react-primitive/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="],
+
+ "@radix-ui/react-dismissable-layer/@radix-ui/react-primitive/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="],
+
+ "@radix-ui/react-dropdown-menu/@radix-ui/react-primitive/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="],
+
+ "@radix-ui/react-focus-scope/@radix-ui/react-primitive/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="],
+
+ "@radix-ui/react-popper/@radix-ui/react-primitive/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="],
+
+ "@radix-ui/react-portal/@radix-ui/react-primitive/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="],
+
+ "@radix-ui/react-radio-group/@radix-ui/react-primitive/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="],
+
+ "@radix-ui/react-roving-focus/@radix-ui/react-primitive/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="],
+
+ "@radix-ui/react-scroll-area/@radix-ui/react-primitive/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="],
+
+ "@radix-ui/react-tabs/@radix-ui/react-primitive/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="],
+
+ "@radix-ui/react-visually-hidden/@radix-ui/react-primitive/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="],
+
+ "c12/chokidar/readdirp": ["readdirp@4.1.2", "", {}, "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg=="],
+ }
+}
diff --git a/compose.override.yml b/compose.override.yml
new file mode 100644
index 0000000000..779cc8238d
--- /dev/null
+++ b/compose.override.yml
@@ -0,0 +1,135 @@
+services:
+
+ # Local services are available on their ports, but also available on:
+ # http://api.localhost.tiangolo.com: backend
+ # http://dashboard.localhost.tiangolo.com: frontend
+ # etc. To enable it, update .env, set:
+ # DOMAIN=localhost.tiangolo.com
+ proxy:
+ image: traefik:3.6
+ volumes:
+ - /var/run/docker.sock:/var/run/docker.sock
+ ports:
+ - "80:80"
+ - "8090:8080"
+ # Duplicate the command from compose.yml to add --api.insecure=true
+ command:
+ # Enable Docker in Traefik, so that it reads labels from Docker services
+ - --providers.docker
+ # Add a constraint to only use services with the label for this stack
+ - --providers.docker.constraints=Label(`traefik.constraint-label`, `traefik-public`)
+ # Do not expose all Docker services, only the ones explicitly exposed
+ - --providers.docker.exposedbydefault=false
+ # Create an entrypoint "http" listening on port 80
+ - --entrypoints.http.address=:80
+ # Create an entrypoint "https" listening on port 443
+ - --entrypoints.https.address=:443
+ # Enable the access log, with HTTP requests
+ - --accesslog
+ # Enable the Traefik log, for configurations and errors
+ - --log
+ # Enable debug logging for local development
+ - --log.level=DEBUG
+ # Enable the Dashboard and API
+ - --api
+ # Enable the Dashboard and API in insecure mode for local development
+ - --api.insecure=true
+ labels:
+ # Enable Traefik for this service, to make it available in the public network
+ - traefik.enable=true
+ - traefik.constraint-label=traefik-public
+ # Dummy https-redirect middleware that doesn't really redirect, only to
+ # allow running it locally
+ - traefik.http.middlewares.https-redirect.contenttype.autodetect=false
+ networks:
+ - traefik-public
+ - default
+
+ db:
+ restart: "no"
+ ports:
+ - "5432:5432"
+
+ adminer:
+ restart: "no"
+ ports:
+ - "8080:8080"
+
+ backend:
+ restart: "no"
+ ports:
+ - "8000:8000"
+ build:
+ context: .
+ dockerfile: backend/Dockerfile
+ # command: sleep infinity # Infinite loop to keep container alive doing nothing
+ command:
+ - fastapi
+ - run
+ - --reload
+ - "app/main.py"
+ develop:
+ watch:
+ - path: ./backend
+ action: sync
+ target: /app/backend
+ ignore:
+ - ./backend/.venv
+ - .venv
+ - path: ./backend/pyproject.toml
+ action: rebuild
+ # TODO: remove once coverage is done locally
+ volumes:
+ - ./backend/htmlcov:/app/backend/htmlcov
+ environment:
+ SMTP_HOST: "mailcatcher"
+ SMTP_PORT: "1025"
+ SMTP_TLS: "false"
+ EMAILS_FROM_EMAIL: "noreply@example.com"
+
+ mailcatcher:
+ image: schickling/mailcatcher
+ ports:
+ - "1080:1080"
+ - "1025:1025"
+
+ frontend:
+ restart: "no"
+ ports:
+ - "5173:80"
+ build:
+ context: .
+ dockerfile: frontend/Dockerfile
+ args:
+ - VITE_API_URL=http://localhost:8000
+ - NODE_ENV=development
+
+ playwright:
+ build:
+ context: .
+ dockerfile: frontend/Dockerfile.playwright
+ args:
+ - VITE_API_URL=http://backend:8000
+ - NODE_ENV=production
+ ipc: host
+ depends_on:
+ - backend
+ - mailcatcher
+ env_file:
+ - .env
+ environment:
+ - VITE_API_URL=http://backend:8000
+ - MAILCATCHER_HOST=http://mailcatcher:1080
+ # For the reports when run locally
+ - PLAYWRIGHT_HTML_HOST=0.0.0.0
+ - CI=${CI}
+ volumes:
+ - ./frontend/blob-report:/app/frontend/blob-report
+ - ./frontend/test-results:/app/frontend/test-results
+ ports:
+ - 9323:9323
+
+networks:
+ traefik-public:
+ # For local dev, don't expect an external Traefik network
+ external: false
diff --git a/compose.traefik.yml b/compose.traefik.yml
new file mode 100644
index 0000000000..bcd7d142ca
--- /dev/null
+++ b/compose.traefik.yml
@@ -0,0 +1,77 @@
+services:
+ traefik:
+ image: traefik:3.6
+ ports:
+ # Listen on port 80, default for HTTP, necessary to redirect to HTTPS
+ - 80:80
+ # Listen on port 443, default for HTTPS
+ - 443:443
+ restart: always
+ labels:
+ # Enable Traefik for this service, to make it available in the public network
+ - traefik.enable=true
+ # Use the traefik-public network (declared below)
+ - traefik.docker.network=traefik-public
+ # Define the port inside of the Docker service to use
+ - traefik.http.services.traefik-dashboard.loadbalancer.server.port=8080
+ # Make Traefik use this domain (from an environment variable) in HTTP
+ - traefik.http.routers.traefik-dashboard-http.entrypoints=http
+ - traefik.http.routers.traefik-dashboard-http.rule=Host(`traefik.${DOMAIN?Variable not set}`)
+ # traefik-https the actual router using HTTPS
+ - traefik.http.routers.traefik-dashboard-https.entrypoints=https
+ - traefik.http.routers.traefik-dashboard-https.rule=Host(`traefik.${DOMAIN?Variable not set}`)
+ - traefik.http.routers.traefik-dashboard-https.tls=true
+ # Use the "le" (Let's Encrypt) resolver created below
+ - traefik.http.routers.traefik-dashboard-https.tls.certresolver=le
+ # Use the special Traefik service api@internal with the web UI/Dashboard
+ - traefik.http.routers.traefik-dashboard-https.service=api@internal
+ # https-redirect middleware to redirect HTTP to HTTPS
+ - traefik.http.middlewares.https-redirect.redirectscheme.scheme=https
+ - traefik.http.middlewares.https-redirect.redirectscheme.permanent=true
+ # traefik-http set up only to use the middleware to redirect to https
+ - traefik.http.routers.traefik-dashboard-http.middlewares=https-redirect
+ # admin-auth middleware with HTTP Basic auth
+ # Using the environment variables USERNAME and HASHED_PASSWORD
+ - traefik.http.middlewares.admin-auth.basicauth.users=${USERNAME?Variable not set}:${HASHED_PASSWORD?Variable not set}
+ # Enable HTTP Basic auth, using the middleware created above
+ - traefik.http.routers.traefik-dashboard-https.middlewares=admin-auth
+ volumes:
+ # Add Docker as a mounted volume, so that Traefik can read the labels of other services
+ - /var/run/docker.sock:/var/run/docker.sock:ro
+ # Mount the volume to store the certificates
+ - traefik-public-certificates:/certificates
+ command:
+ # Enable Docker in Traefik, so that it reads labels from Docker services
+ - --providers.docker
+ # Do not expose all Docker services, only the ones explicitly exposed
+ - --providers.docker.exposedbydefault=false
+ # Create an entrypoint "http" listening on port 80
+ - --entrypoints.http.address=:80
+ # Create an entrypoint "https" listening on port 443
+ - --entrypoints.https.address=:443
+ # Create the certificate resolver "le" for Let's Encrypt, uses the environment variable EMAIL
+ - --certificatesresolvers.le.acme.email=${EMAIL?Variable not set}
+ # Store the Let's Encrypt certificates in the mounted volume
+ - --certificatesresolvers.le.acme.storage=/certificates/acme.json
+ # Use the TLS Challenge for Let's Encrypt
+ - --certificatesresolvers.le.acme.tlschallenge=true
+ # Enable the access log, with HTTP requests
+ - --accesslog
+ # Enable the Traefik log, for configurations and errors
+ - --log
+ # Enable the Dashboard and API
+ - --api
+ networks:
+ # Use the public network created to be shared between Traefik and
+ # any other service that needs to be publicly available with HTTPS
+ - traefik-public
+
+volumes:
+ # Create a volume to store the certificates, even if the container is recreated
+ traefik-public-certificates:
+
+networks:
+ # Use the previously created public network "traefik-public", shared with other
+ # services that need to be publicly available via this Traefik
+ traefik-public:
+ external: true
diff --git a/compose.yml b/compose.yml
new file mode 100644
index 0000000000..2488fc007b
--- /dev/null
+++ b/compose.yml
@@ -0,0 +1,174 @@
+services:
+
+ db:
+ image: postgres:18
+ restart: always
+ healthcheck:
+ test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER} -d ${POSTGRES_DB}"]
+ interval: 10s
+ retries: 5
+ start_period: 30s
+ timeout: 10s
+ volumes:
+ - app-db-data:/var/lib/postgresql/data/pgdata
+ env_file:
+ - .env
+ environment:
+ - PGDATA=/var/lib/postgresql/data/pgdata
+ - POSTGRES_PASSWORD=${POSTGRES_PASSWORD?Variable not set}
+ - POSTGRES_USER=${POSTGRES_USER?Variable not set}
+ - POSTGRES_DB=${POSTGRES_DB?Variable not set}
+
+ adminer:
+ image: adminer
+ restart: always
+ networks:
+ - traefik-public
+ - default
+ depends_on:
+ - db
+ environment:
+ - ADMINER_DESIGN=pepa-linha-dark
+ labels:
+ - traefik.enable=true
+ - traefik.docker.network=traefik-public
+ - traefik.constraint-label=traefik-public
+ - traefik.http.routers.${STACK_NAME?Variable not set}-adminer-http.rule=Host(`adminer.${DOMAIN?Variable not set}`)
+ - traefik.http.routers.${STACK_NAME?Variable not set}-adminer-http.entrypoints=http
+ - traefik.http.routers.${STACK_NAME?Variable not set}-adminer-http.middlewares=https-redirect
+ - traefik.http.routers.${STACK_NAME?Variable not set}-adminer-https.rule=Host(`adminer.${DOMAIN?Variable not set}`)
+ - traefik.http.routers.${STACK_NAME?Variable not set}-adminer-https.entrypoints=https
+ - traefik.http.routers.${STACK_NAME?Variable not set}-adminer-https.tls=true
+ - traefik.http.routers.${STACK_NAME?Variable not set}-adminer-https.tls.certresolver=le
+ - traefik.http.services.${STACK_NAME?Variable not set}-adminer.loadbalancer.server.port=8080
+
+ prestart:
+ image: '${DOCKER_IMAGE_BACKEND?Variable not set}:${TAG-latest}'
+ build:
+ context: .
+ dockerfile: backend/Dockerfile
+ networks:
+ - traefik-public
+ - default
+ depends_on:
+ db:
+ condition: service_healthy
+ restart: true
+ command: bash scripts/prestart.sh
+ env_file:
+ - .env
+ environment:
+ - DOMAIN=${DOMAIN}
+ - FRONTEND_HOST=${FRONTEND_HOST?Variable not set}
+ - ENVIRONMENT=${ENVIRONMENT}
+ - BACKEND_CORS_ORIGINS=${BACKEND_CORS_ORIGINS}
+ - SECRET_KEY=${SECRET_KEY?Variable not set}
+ - FIRST_SUPERUSER=${FIRST_SUPERUSER?Variable not set}
+ - FIRST_SUPERUSER_PASSWORD=${FIRST_SUPERUSER_PASSWORD?Variable not set}
+ - SMTP_HOST=${SMTP_HOST}
+ - SMTP_USER=${SMTP_USER}
+ - SMTP_PASSWORD=${SMTP_PASSWORD}
+ - EMAILS_FROM_EMAIL=${EMAILS_FROM_EMAIL}
+ - POSTGRES_SERVER=db
+ - POSTGRES_PORT=${POSTGRES_PORT}
+ - POSTGRES_DB=${POSTGRES_DB}
+ - POSTGRES_USER=${POSTGRES_USER?Variable not set}
+ - POSTGRES_PASSWORD=${POSTGRES_PASSWORD?Variable not set}
+ - SENTRY_DSN=${SENTRY_DSN}
+
+ backend:
+ image: '${DOCKER_IMAGE_BACKEND?Variable not set}:${TAG-latest}'
+ restart: always
+ networks:
+ - traefik-public
+ - default
+ depends_on:
+ db:
+ condition: service_healthy
+ restart: true
+ prestart:
+ condition: service_completed_successfully
+ env_file:
+ - .env
+ environment:
+ - DOMAIN=${DOMAIN}
+ - FRONTEND_HOST=${FRONTEND_HOST?Variable not set}
+ - ENVIRONMENT=${ENVIRONMENT}
+ - BACKEND_CORS_ORIGINS=${BACKEND_CORS_ORIGINS}
+ - SECRET_KEY=${SECRET_KEY?Variable not set}
+ - FIRST_SUPERUSER=${FIRST_SUPERUSER?Variable not set}
+ - FIRST_SUPERUSER_PASSWORD=${FIRST_SUPERUSER_PASSWORD?Variable not set}
+ - SMTP_HOST=${SMTP_HOST}
+ - SMTP_USER=${SMTP_USER}
+ - SMTP_PASSWORD=${SMTP_PASSWORD}
+ - EMAILS_FROM_EMAIL=${EMAILS_FROM_EMAIL}
+ - POSTGRES_SERVER=db
+ - POSTGRES_PORT=${POSTGRES_PORT}
+ - POSTGRES_DB=${POSTGRES_DB}
+ - POSTGRES_USER=${POSTGRES_USER?Variable not set}
+ - POSTGRES_PASSWORD=${POSTGRES_PASSWORD?Variable not set}
+ - SENTRY_DSN=${SENTRY_DSN}
+
+ healthcheck:
+ test: ["CMD", "curl", "-f", "http://localhost:8000/api/v1/utils/health-check/"]
+ interval: 10s
+ timeout: 5s
+ retries: 5
+
+ build:
+ context: .
+ dockerfile: backend/Dockerfile
+ labels:
+ - traefik.enable=true
+ - traefik.docker.network=traefik-public
+ - traefik.constraint-label=traefik-public
+
+ - traefik.http.services.${STACK_NAME?Variable not set}-backend.loadbalancer.server.port=8000
+
+ - traefik.http.routers.${STACK_NAME?Variable not set}-backend-http.rule=Host(`api.${DOMAIN?Variable not set}`)
+ - traefik.http.routers.${STACK_NAME?Variable not set}-backend-http.entrypoints=http
+
+ - traefik.http.routers.${STACK_NAME?Variable not set}-backend-https.rule=Host(`api.${DOMAIN?Variable not set}`)
+ - traefik.http.routers.${STACK_NAME?Variable not set}-backend-https.entrypoints=https
+ - traefik.http.routers.${STACK_NAME?Variable not set}-backend-https.tls=true
+ - traefik.http.routers.${STACK_NAME?Variable not set}-backend-https.tls.certresolver=le
+
+ # Enable redirection for HTTP and HTTPS
+ - traefik.http.routers.${STACK_NAME?Variable not set}-backend-http.middlewares=https-redirect
+
+ frontend:
+ image: '${DOCKER_IMAGE_FRONTEND?Variable not set}:${TAG-latest}'
+ restart: always
+ networks:
+ - traefik-public
+ - default
+ build:
+ context: .
+ dockerfile: frontend/Dockerfile
+ args:
+ - VITE_API_URL=https://api.${DOMAIN?Variable not set}
+ - NODE_ENV=production
+ labels:
+ - traefik.enable=true
+ - traefik.docker.network=traefik-public
+ - traefik.constraint-label=traefik-public
+
+ - traefik.http.services.${STACK_NAME?Variable not set}-frontend.loadbalancer.server.port=80
+
+ - traefik.http.routers.${STACK_NAME?Variable not set}-frontend-http.rule=Host(`dashboard.${DOMAIN?Variable not set}`)
+ - traefik.http.routers.${STACK_NAME?Variable not set}-frontend-http.entrypoints=http
+
+ - traefik.http.routers.${STACK_NAME?Variable not set}-frontend-https.rule=Host(`dashboard.${DOMAIN?Variable not set}`)
+ - traefik.http.routers.${STACK_NAME?Variable not set}-frontend-https.entrypoints=https
+ - traefik.http.routers.${STACK_NAME?Variable not set}-frontend-https.tls=true
+ - traefik.http.routers.${STACK_NAME?Variable not set}-frontend-https.tls.certresolver=le
+
+ # Enable redirection for HTTP and HTTPS
+ - traefik.http.routers.${STACK_NAME?Variable not set}-frontend-http.middlewares=https-redirect
+volumes:
+ app-db-data:
+
+networks:
+ traefik-public:
+ # Allow setting it to false for testing
+ external: true
diff --git a/cookiecutter.json b/cookiecutter.json
deleted file mode 100644
index fc0e6fab00..0000000000
--- a/cookiecutter.json
+++ /dev/null
@@ -1,44 +0,0 @@
-{
- "project_name": "Base Project",
- "project_slug": "{{ cookiecutter.project_name|lower|replace(' ', '-') }}",
- "domain_main": "{{cookiecutter.project_slug}}.com",
- "domain_staging": "stag.{{cookiecutter.domain_main}}",
-
- "docker_swarm_stack_name_main": "{{cookiecutter.domain_main|replace('.', '-')}}",
- "docker_swarm_stack_name_staging": "{{cookiecutter.domain_staging|replace('.', '-')}}",
-
- "secret_key": "changethis",
- "first_superuser": "admin@{{cookiecutter.domain_main}}",
- "first_superuser_password": "changethis",
- "backend_cors_origins": "[\"http://localhost\", \"http://localhost:4200\", \"http://localhost:3000\", \"http://localhost:8080\", \"https://localhost\", \"https://localhost:4200\", \"https://localhost:3000\", \"https://localhost:8080\", \"http://dev.{{cookiecutter.domain_main}}\", \"https://{{cookiecutter.domain_staging}}\", \"https://{{cookiecutter.domain_main}}\", \"http://local.dockertoolbox.tiangolo.com\", \"http://localhost.tiangolo.com\"]",
- "smtp_port": "587",
- "smtp_host": "",
- "smtp_user": "",
- "smtp_password": "",
- "smtp_emails_from_email": "info@{{cookiecutter.domain_main}}",
-
- "postgres_password": "changethis",
- "pgadmin_default_user": "{{cookiecutter.first_superuser}}",
- "pgadmin_default_user_password": "{{cookiecutter.first_superuser_password}}",
-
- "traefik_constraint_tag": "{{cookiecutter.domain_main}}",
- "traefik_constraint_tag_staging": "{{cookiecutter.domain_staging}}",
- "traefik_public_constraint_tag": "traefik-public",
-
- "flower_auth": "admin:{{cookiecutter.first_superuser_password}}",
-
- "sentry_dsn": "",
-
- "docker_image_prefix": "",
-
- "docker_image_backend": "{{cookiecutter.docker_image_prefix}}backend",
- "docker_image_celeryworker": "{{cookiecutter.docker_image_prefix}}celeryworker",
- "docker_image_frontend": "{{cookiecutter.docker_image_prefix}}frontend",
-
- "_copy_without_render": [
- "frontend/src/**/*.html",
- "frontend/src/**/*.vue",
- "frontend/node_modules/*",
- "backend/app/app/email-templates/**"
- ]
-}
diff --git a/copier.yml b/copier.yml
new file mode 100644
index 0000000000..f98e3fc861
--- /dev/null
+++ b/copier.yml
@@ -0,0 +1,100 @@
+project_name:
+ type: str
+ help: The name of the project, shown to API users (in .env)
+ default: FastAPI Project
+
+stack_name:
+ type: str
+ help: The name of the stack used for Docker Compose labels (no spaces) (in .env)
+ default: fastapi-project
+
+secret_key:
+ type: str
+ help: |
+ 'The secret key for the project, used for security,
+ stored in .env, you can generate one with:
+ python -c "import secrets; print(secrets.token_urlsafe(32))"'
+ default: changethis
+
+first_superuser:
+ type: str
+ help: The email of the first superuser (in .env)
+ default: admin@example.com
+
+first_superuser_password:
+ type: str
+ help: The password of the first superuser (in .env)
+ default: changethis
+
+smtp_host:
+ type: str
+ help: The SMTP server host to send emails, you can set it later in .env
+ default: ""
+
+smtp_user:
+ type: str
+ help: The SMTP server user to send emails, you can set it later in .env
+ default: ""
+
+smtp_password:
+ type: str
+ help: The SMTP server password to send emails, you can set it later in .env
+ default: ""
+
+emails_from_email:
+ type: str
+ help: The email account to send emails from, you can set it later in .env
+ default: info@example.com
+
+postgres_password:
+ type: str
+ help: |
+ 'The password for the PostgreSQL database, stored in .env,
+ you can generate one with:
+ python -c "import secrets; print(secrets.token_urlsafe(32))"'
+ default: changethis
+
+sentry_dsn:
+ type: str
+ help: The DSN for Sentry, if you are using it, you can set it later in .env
+ default: ""
+
+_exclude:
+ # Global
+ - .vscode
+ - .mypy_cache
+ # Python
+ - __pycache__
+ - app.egg-info
+ - "*.pyc"
+ - .mypy_cache
+ - .coverage
+ - htmlcov
+ - .cache
+ - .venv
+ # Frontend
+ # Logs
+ - logs
+ - "*.log"
+ - npm-debug.log*
+ - yarn-debug.log*
+ - yarn-error.log*
+ - pnpm-debug.log*
+ - lerna-debug.log*
+ - node_modules
+ - dist
+ - dist-ssr
+ - "*.local"
+ # Editor directories and files
+ - .idea
+ - .DS_Store
+ - "*.suo"
+ - "*.ntvs*"
+ - "*.njsproj"
+ - "*.sln"
+ - "*.sw?"
+
+_answers_file: .copier/.copier-answers.yml
+
+_tasks:
+ - ["{{ _copier_python }}", .copier/update_dotenv.py]
diff --git a/deployment.md b/deployment.md
new file mode 100644
index 0000000000..4b8ebc1988
--- /dev/null
+++ b/deployment.md
@@ -0,0 +1,344 @@
+# FastAPI Project - Deployment
+
+You can deploy the project using Docker Compose to a remote server.
+
+This project expects you to have a Traefik proxy handling communication to the outside world and HTTPS certificates.
+
+You can use CI/CD (continuous integration and continuous deployment) systems to deploy automatically; there are already configurations to do it with GitHub Actions.
+
+But you have to configure a couple of things first.
+
+## Preparation
+
+* Have a remote server ready and available.
+* Configure the DNS records of your domain to point to the IP of the server you just created.
+* Configure a wildcard subdomain for your domain, so that you can have multiple subdomains for different services, e.g. `*.fastapi-project.example.com`. This will be useful for accessing different components, like `dashboard.fastapi-project.example.com`, `api.fastapi-project.example.com`, `traefik.fastapi-project.example.com`, `adminer.fastapi-project.example.com`, etc. And also for `staging`, like `dashboard.staging.fastapi-project.example.com`, `adminer.staging.fastapi-project.example.com`, etc. You can verify the wildcard record with the quick check sketched after this list.
+* Install and configure [Docker](https://docs.docker.com/engine/install/) on the remote server (Docker Engine, not Docker Desktop).
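+
+To confirm the wildcard DNS record is in place, here is a quick check (a sketch, assuming `dig` is installed; any subdomain should resolve to your server's IP):
+
+```bash
+dig +short dashboard.fastapi-project.example.com
+dig +short api.fastapi-project.example.com
+```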
+
+## Public Traefik
+
+We need a Traefik proxy to handle incoming connections and HTTPS certificates.
+
+You need to do these next steps only once.
+
+### Traefik Docker Compose
+
+* Create a remote directory to store your Traefik Docker Compose file:
+
+```bash
+mkdir -p /root/code/traefik-public/
+```
+
+Copy the Traefik Docker Compose file to your server. You can do it by running `rsync` in your local terminal:
+
+```bash
+rsync -a compose.traefik.yml root@your-server.example.com:/root/code/traefik-public/
+```
+
+### Traefik Public Network
+
+This Traefik will expect a Docker "public network" named `traefik-public` to communicate with your stack(s).
+
+This way, there will be a single public Traefik proxy that handles the communication (HTTP and HTTPS) with the outside world, and then behind that, you could have one or more stacks with different domains, even if they are on the same single server.
+
+To create a Docker "public network" named `traefik-public`, run the following command on your remote server:
+
+```bash
+docker network create traefik-public
+```
+
+### Traefik Environment Variables
+
+The Traefik Docker Compose file expects some environment variables to be set in your terminal before starting it. You can do it by running the following commands on your remote server.
+
+* Create the username for HTTP Basic Auth, e.g.:
+
+```bash
+export USERNAME=admin
+```
+
+* Create an environment variable with the password for HTTP Basic Auth, e.g.:
+
+```bash
+export PASSWORD=changethis
+```
+
+* Use openssl to generate the "hashed" version of the password for HTTP Basic Auth and store it in an environment variable:
+
+```bash
+export HASHED_PASSWORD=$(openssl passwd -apr1 $PASSWORD)
+```
+
+To verify that the hashed password is correct, you can print it:
+
+```bash
+echo $HASHED_PASSWORD
+```
+
+* Create an environment variable with the domain name for your server, e.g.:
+
+```bash
+export DOMAIN=fastapi-project.example.com
+```
+
+* Create an environment variable with the email for Let's Encrypt, e.g.:
+
+```bash
+export EMAIL=admin@example.com
+```
+
+**Note**: you need to set a real email; an email `@example.com` won't work.
+
+### Start the Traefik Docker Compose
+
+Go to the directory where you copied the Traefik Docker Compose file on your remote server:
+
+```bash
+cd /root/code/traefik-public/
+```
+
+Now with the environment variables set and the `compose.traefik.yml` in place, you can start the Traefik Docker Compose by running the following command:
+
+```bash
+docker compose -f compose.traefik.yml up -d
+```
+
+## Deploy the FastAPI Project
+
+Now that you have Traefik in place you can deploy your FastAPI project with Docker Compose.
+
+**Note**: You might want to jump ahead to the section about Continuous Deployment with GitHub Actions.
+
+## Copy the Code
+
+```bash
+rsync -av --filter=":- .gitignore" ./ root@your-server.example.com:/root/code/app/
+```
+
+Note: `--filter=":- .gitignore"` tells `rsync` to use the same rules as Git and skip files ignored by Git, like the Python virtual environment.
+
+## Environment Variables
+
+You need to set some environment variables first.
+
+### Generate secret keys
+
+Some environment variables in the `.env` file have a default value of `changethis`.
+
+You have to replace them with a secret key. To generate secret keys, you can run the following command:
+
+```bash
+python -c "import secrets; print(secrets.token_urlsafe(32))"
+```
+
+Copy the output and use it as the password / secret key. Run the command again to generate another secure key.
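+
+If you prefer, you can generate and export a value in a single step; this is just a sketch using the same Python command, with the variable names used in the next section:
+
+```bash
+# Generate random values and export them directly
+export SECRET_KEY=$(python -c "import secrets; print(secrets.token_urlsafe(32))")
+export POSTGRES_PASSWORD=$(python -c "import secrets; print(secrets.token_urlsafe(32))")
+export FIRST_SUPERUSER_PASSWORD=$(python -c "import secrets; print(secrets.token_urlsafe(32))")
+```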
+
+### Required Environment Variables
+
+Set the `ENVIRONMENT`, by default `local` (for development), but when deploying to a server you would put something like `staging` or `production`:
+
+```bash
+export ENVIRONMENT=production
+```
+
+Set the `DOMAIN`, by default `localhost` (for development), but when deploying you would use your own domain, for example:
+
+```bash
+export DOMAIN=fastapi-project.example.com
+```
+
+Set the `POSTGRES_PASSWORD` to something different from `changethis`:
+
+```bash
+export POSTGRES_PASSWORD="changethis"
+```
+
+Set the `SECRET_KEY`, used to sign tokens:
+
+```bash
+export SECRET_KEY="changethis"
+```
+
+Note: you can use the Python command above to generate a secure secret key.
+
+Set the `FIRST_SUPERUSER_PASSWORD` to something different from `changethis`:
+
+```bash
+export FIRST_SUPERUSER_PASSWORD="changethis"
+```
+
+Set the `BACKEND_CORS_ORIGINS` to include your domain:
+
+```bash
+export BACKEND_CORS_ORIGINS="https://dashboard.${DOMAIN?Variable not set},https://api.${DOMAIN?Variable not set}"
+```
+
+You can set several other environment variables:
+
+* `PROJECT_NAME`: The name of the project, used in the API for the docs and emails.
+* `STACK_NAME`: The name of the stack used for Docker Compose labels and project name, this should be different for `staging`, `production`, etc. You could use the same domain replacing dots with dashes, e.g. `fastapi-project-example-com` and `staging-fastapi-project-example-com`.
+* `BACKEND_CORS_ORIGINS`: A list of allowed CORS origins separated by commas.
+* `FIRST_SUPERUSER`: The email of the first superuser, this superuser will be the one that can create new users.
+* `SMTP_HOST`: The SMTP server host to send emails; this would come from your email provider (e.g. Mailgun, SparkPost, SendGrid, etc.).
+* `SMTP_USER`: The SMTP server user to send emails.
+* `SMTP_PASSWORD`: The SMTP server password to send emails.
+* `EMAILS_FROM_EMAIL`: The email account to send emails from.
+* `POSTGRES_SERVER`: The hostname of the PostgreSQL server. You can leave the default of `db`, provided by the same Docker Compose. You normally wouldn't need to change this unless you are using a third-party provider.
+* `POSTGRES_PORT`: The port of the PostgreSQL server. You can leave the default. You normally wouldn't need to change this unless you are using a third-party provider.
+* `POSTGRES_USER`: The Postgres user, you can leave the default.
+* `POSTGRES_DB`: The database name to use for this application. You can leave the default of `app`.
+* `SENTRY_DSN`: The DSN for Sentry, if you are using it.
+
+## GitHub Actions Environment Variables
+
+There are some environment variables only used by GitHub Actions that you can configure:
+
+* `LATEST_CHANGES`: Used by the GitHub Action [latest-changes](https://github.com/tiangolo/latest-changes) to automatically add release notes based on the PRs merged. It's a personal access token; read the docs for details.
+* `SMOKESHOW_AUTH_KEY`: Used to handle and publish the code coverage using [Smokeshow](https://github.com/samuelcolvin/smokeshow), follow their instructions to create a (free) Smokeshow key.
+
+### Deploy with Docker Compose
+
+With the environment variables in place, you can deploy with Docker Compose:
+
+```bash
+cd /root/code/app/
+docker compose -f compose.yml build
+docker compose -f compose.yml up -d
+```
+
+For production you wouldn't want to have the overrides in `compose.override.yml`; that's why we explicitly specify `compose.yml` as the file to use.
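+
+If you want to double-check what will actually be deployed, you can render the fully resolved configuration first (optional; `docker compose config` only prints the merged result, it doesn't start anything):
+
+```bash
+docker compose -f compose.yml config
+```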
+
+## Continuous Deployment (CD)
+
+You can use GitHub Actions to deploy your project automatically.
+
+You can have multiple environment deployments.
+
+There are already two environments configured, `staging` and `production`.
+
+### Install GitHub Actions Runner
+
+* On your remote server, create a user for your GitHub Actions:
+
+```bash
+sudo adduser github
+```
+
+* Add Docker permissions to the `github` user:
+
+```bash
+sudo usermod -aG docker github
+```
+
+* Temporarily switch to the `github` user:
+
+```bash
+sudo su - github
+```
+
+* Go to the `github` user's home directory:
+
+```bash
+cd
+```
+
+* [Install a GitHub Action self-hosted runner following the official guide](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/adding-self-hosted-runners#adding-a-self-hosted-runner-to-a-repository).
+
+* When asked about labels, add a label for the environment, e.g. `production`. You can also add labels later.
+
+After installing, the guide will tell you to run a command to start the runner. However, it would stop once you terminate that process or if your connection to the server is lost.
+
+To make sure it runs on startup and continues running, you can install it as a service. To do that, exit the `github` user and go back to the `root` user:
+
+```bash
+exit
+```
+
+After you do that, you will be back on the previous user, in that user's previous directory.
+
+Before you can go to the `github` user's directory, you need to become the `root` user (you might already be):
+
+```bash
+sudo su
+```
+
+* As the `root` user, go to the `actions-runner` directory inside of the `github` user's home directory:
+
+```bash
+cd /home/github/actions-runner
+```
+
+* Install the self-hosted runner as a service with the user `github`:
+
+```bash
+./svc.sh install github
+```
+
+* Start the service:
+
+```bash
+./svc.sh start
+```
+
+* Check the status of the service:
+
+```bash
+./svc.sh status
+```
+
+You can read more about it in the official guide: [Configuring the self-hosted runner application as a service](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/configuring-the-self-hosted-runner-application-as-a-service).
+
+### Set Secrets
+
+On your repository, configure secrets for the environment variables you need, the same ones described above, including `SECRET_KEY`, etc. Follow the [official GitHub guide for setting repository secrets](https://docs.github.com/en/actions/security-guides/using-secrets-in-github-actions#creating-secrets-for-a-repository).
+
+The current GitHub Actions workflows expect these secrets (you can also set them from the command line, as sketched after this list):
+
+* `DOMAIN_PRODUCTION`
+* `DOMAIN_STAGING`
+* `STACK_NAME_PRODUCTION`
+* `STACK_NAME_STAGING`
+* `EMAILS_FROM_EMAIL`
+* `FIRST_SUPERUSER`
+* `FIRST_SUPERUSER_PASSWORD`
+* `POSTGRES_PASSWORD`
+* `SECRET_KEY`
+* `LATEST_CHANGES`
+* `SMOKESHOW_AUTH_KEY`
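+
+If you prefer the command line over the web UI, you could set them with the GitHub CLI; a sketch, assuming `gh` is installed and authenticated for this repository:
+
+```bash
+# Set repository secrets from values already exported in your shell
+gh secret set SECRET_KEY --body "$SECRET_KEY"
+gh secret set POSTGRES_PASSWORD --body "$POSTGRES_PASSWORD"
+```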
+
+## GitHub Action Deployment Workflows
+
+There are GitHub Action workflows in the `.github/workflows` directory already configured for deploying to the environments (GitHub Actions runners with the labels):
+
+* `staging`: after pushing (or merging) to the branch `master`.
+* `production`: after publishing a release.
+
+If you need to add extra environments you could use those as a starting point.
+
+## URLs
+
+Replace `fastapi-project.example.com` with your domain.
+
+### Main Traefik Dashboard
+
+Traefik UI: `https://traefik.fastapi-project.example.com`
+
+### Production
+
+Frontend: `https://dashboard.fastapi-project.example.com`
+
+Backend API docs: `https://api.fastapi-project.example.com/docs`
+
+Backend API base URL: `https://api.fastapi-project.example.com`
+
+Adminer: `https://adminer.fastapi-project.example.com`
+
+### Staging
+
+Frontend: `https://dashboard.staging.fastapi-project.example.com`
+
+Backend API docs: `https://api.staging.fastapi-project.example.com/docs`
+
+Backend API base URL: `https://api.staging.fastapi-project.example.com`
+
+Adminer: `https://adminer.staging.fastapi-project.example.com`
diff --git a/development.md b/development.md
new file mode 100644
index 0000000000..7879ffcdbc
--- /dev/null
+++ b/development.md
@@ -0,0 +1,221 @@
+# FastAPI Project - Development
+
+## Docker Compose
+
+* Start the local stack with Docker Compose:
+
+```bash
+docker compose watch
+```
+
+* Now you can open your browser and interact with these URLs:
+
+Frontend, built with Docker, with routes handled based on the path: http://localhost:5173
+
+Backend, JSON based web API based on OpenAPI: http://localhost:8000
+
+Automatic interactive documentation with Swagger UI (from the OpenAPI backend): http://localhost:8000/docs
+
+Adminer, database web administration: http://localhost:8080
+
+Traefik UI, to see how the routes are being handled by the proxy: http://localhost:8090
+
+**Note**: The first time you start your stack, it might take a minute for it to be ready while the backend waits for the database and configures everything. You can check the logs to monitor it.
+
+To check the logs, run (in another terminal):
+
+```bash
+docker compose logs
+```
+
+To check the logs of a specific service, add the name of the service, e.g.:
+
+```bash
+docker compose logs backend
+```
+
+## Mailcatcher
+
+Mailcatcher is a simple SMTP server that catches all emails sent by the backend during local development. Instead of sending real emails, they are captured and displayed in a web interface.
+
+This is useful for:
+
+* Testing email functionality during development
+* Verifying email content and formatting
+* Debugging email-related functionality without sending real emails
+
+The backend is automatically configured to use Mailcatcher when running with Docker Compose locally (SMTP on port 1025). All captured emails can be viewed at http://localhost:1080.
+
+## Local Development
+
+The Docker Compose files are configured so that each of the services is available on a different port on `localhost`.
+
+The backend and frontend use the same ports that their local development servers would use, so the backend is at `http://localhost:8000` and the frontend at `http://localhost:5173`.
+
+This way, you could turn off a Docker Compose service and start its local development service, and everything would keep working, because it all uses the same ports.
+
+For example, to stop the `frontend` service in Docker Compose, run in another terminal:
+
+```bash
+docker compose stop frontend
+```
+
+And then start the local frontend development server:
+
+```bash
+bun run dev
+```
+
+Or you could stop the `backend` Docker Compose service:
+
+```bash
+docker compose stop backend
+```
+
+And then you can run the local development server for the backend:
+
+```bash
+cd backend
+fastapi dev app/main.py
+```
+
+## Docker Compose in `localhost.tiangolo.com`
+
+When you start the Docker Compose stack, it uses `localhost` by default, with different ports for each service (backend, frontend, adminer, etc).
+
+When you deploy it to production (or staging), it will deploy each service in a different subdomain, like `api.example.com` for the backend and `dashboard.example.com` for the frontend.
+
+In the guide about [deployment](deployment.md) you can read about Traefik, the configured proxy. That's the component in charge of transmitting traffic to each service based on the subdomain.
+
+If you want to test that it's all working locally, you can edit the local `.env` file, and change:
+
+```dotenv
+DOMAIN=localhost.tiangolo.com
+```
+
+That will be used by the Docker Compose files to configure the base domain for the services.
+
+Traefik will use this to transmit traffic at `api.localhost.tiangolo.com` to the backend, and traffic at `dashboard.localhost.tiangolo.com` to the frontend.
+
+The domain `localhost.tiangolo.com` is a special domain that is configured (with all its subdomains) to point to `127.0.0.1`. This way you can use that for your local development.
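+
+You can verify the resolution yourself (a quick check, assuming `dig` is available):
+
+```bash
+dig +short api.localhost.tiangolo.com
+# Expected output, per the note above: 127.0.0.1
+```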
+
+After you update it, run again:
+
+```bash
+docker compose watch
+```
+
+When deploying, for example in production, the main Traefik is configured outside of the Docker Compose files. For local development, there's an included Traefik in `compose.override.yml`, just to let you test that the domains work as expected, for example with `api.localhost.tiangolo.com` and `dashboard.localhost.tiangolo.com`.
+
+## Docker Compose files and env vars
+
+There is a main `compose.yml` file with all the configurations that apply to the whole stack; it is used automatically by `docker compose`.
+
+And there's also a `compose.override.yml` with overrides for development, for example to mount the source code as a volume. It is used automatically by `docker compose` to apply overrides on top of `compose.yml`.
+
+These Docker Compose files use the `.env` file containing configurations to be injected as environment variables in the containers.
+
+They also use some additional configurations taken from environment variables set in the scripts before calling the `docker compose` command.
+
+After changing variables, make sure you restart the stack:
+
+```bash
+docker compose watch
+```
+
+## The .env file
+
+The `.env` file is the one that contains all your configurations, generated keys and passwords, etc.
+
+Depending on your workflow, you might want to exclude it from Git, for example if your project is public. In that case, you would have to make sure to set up a way for your CI tools to obtain it while building or deploying your project.
+
+One way to do it could be to add each environment variable to your CI/CD system, and update the `compose.yml` file to read that specific env var instead of reading the `.env` file.
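+
+Alternatively, a CI step could recreate the `.env` file from secrets right before deploying; a sketch, assuming the secrets are exposed to the job as environment variables:
+
+```bash
+# Recreate .env from CI-provided secrets (only the sensitive values are shown here)
+cat > .env <<EOF
+SECRET_KEY=${SECRET_KEY}
+POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
+FIRST_SUPERUSER_PASSWORD=${FIRST_SUPERUSER_PASSWORD}
+EOF
+docker compose -f compose.yml up -d
+```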
+
+## Pre-commits and code linting
+
+We are using a tool called [prek](https://prek.j178.dev/) (a modern alternative to [Pre-commit](https://pre-commit.com/)) for code linting and formatting.
+
+When you install it, it runs right before making a commit in git. This way it ensures that the code is consistent and formatted even before it is committed.
+
+You can find a file `.pre-commit-config.yaml` with configurations at the root of the project.
+
+### Install prek to run automatically
+
+`prek` is already part of the dependencies of the project.
+
+After having the `prek` tool installed and available, you need to "install" it in the local repository, so that it runs automatically before each commit.
+
+Using `uv`, you could do it with (make sure you are inside the `backend` folder):
+
+```bash
+โฏ uv run prek install -f
+prek installed at `../.git/hooks/pre-commit`
+```
+
+The `-f` flag forces the installation, in case there was already a `pre-commit` hook previously installed.
+
+Now whenever you try to commit, e.g. with:
+
+```bash
+git commit
+```
+
+...prek will run, check, and format the code you are about to commit, and will ask you to add that code (stage it) with git again before committing.
+
+Then you can `git add` the modified/fixed files and commit.
+
+### Running prek hooks manually
+
+You can also run `prek` manually on all the files using `uv` with:
+
+```bash
+โฏ uv run prek run --all-files
+check for added large files..............................................Passed
+check toml...............................................................Passed
+check yaml...............................................................Passed
+fix end of files.........................................................Passed
+trim trailing whitespace.................................................Passed
+ruff.....................................................................Passed
+ruff-format..............................................................Passed
+biome check..............................................................Passed
+```
+
+## URLs
+
+The production or staging URLs would use these same paths, but with your own domain.
+
+### Development URLs
+
+Development URLs, for local development.
+
+Frontend: http://localhost:5173
+
+Backend: http://localhost:8000
+
+Automatic Interactive Docs (Swagger UI): http://localhost:8000/docs
+
+Automatic Alternative Docs (ReDoc): http://localhost:8000/redoc
+
+Adminer: http://localhost:8080
+
+Traefik UI: http://localhost:8090
+
+MailCatcher: http://localhost:1080
+
+### Development URLs with `localhost.tiangolo.com` Configured
+
+Development URLs, for local development.
+
+Frontend: http://dashboard.localhost.tiangolo.com
+
+Backend: http://api.localhost.tiangolo.com
+
+Automatic Interactive Docs (Swagger UI): http://api.localhost.tiangolo.com/docs
+
+Automatic Alternative Docs (ReDoc): http://api.localhost.tiangolo.com/redoc
+
+Adminer: http://localhost.tiangolo.com:8080
+
+Traefik UI: http://localhost.tiangolo.com:8090
+
+MailCatcher: http://localhost.tiangolo.com:1080
diff --git a/{{cookiecutter.project_slug}}/frontend/.dockerignore b/frontend/.dockerignore
old mode 100755
new mode 100644
similarity index 72%
rename from {{cookiecutter.project_slug}}/frontend/.dockerignore
rename to frontend/.dockerignore
index 3c3629e647..f06235c460
--- a/{{cookiecutter.project_slug}}/frontend/.dockerignore
+++ b/frontend/.dockerignore
@@ -1 +1,2 @@
node_modules
+dist
diff --git a/frontend/.env b/frontend/.env
new file mode 100644
index 0000000000..27fcbfe8c8
--- /dev/null
+++ b/frontend/.env
@@ -0,0 +1,2 @@
+VITE_API_URL=http://localhost:8000
+MAILCATCHER_HOST=http://localhost:1080
diff --git a/frontend/.gitignore b/frontend/.gitignore
new file mode 100644
index 0000000000..093ec6dcbd
--- /dev/null
+++ b/frontend/.gitignore
@@ -0,0 +1,30 @@
+# Logs
+logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+pnpm-debug.log*
+lerna-debug.log*
+
+node_modules
+dist
+dist-ssr
+*.local
+openapi.json
+
+# Editor directories and files
+.vscode/*
+!.vscode/extensions.json
+.idea
+.DS_Store
+*.suo
+*.ntvs*
+*.njsproj
+*.sln
+*.sw?
+/test-results/
+/playwright-report/
+/blob-report/
+/playwright/.cache/
+/playwright/.auth/
diff --git a/frontend/Dockerfile b/frontend/Dockerfile
new file mode 100644
index 0000000000..e9bae4085b
--- /dev/null
+++ b/frontend/Dockerfile
@@ -0,0 +1,26 @@
+# Stage 0, "build-stage", based on Bun, to build and compile the frontend
+FROM oven/bun:1 AS build-stage
+
+WORKDIR /app
+
+COPY package.json bun.lock /app/
+
+COPY frontend/package.json /app/frontend/
+
+WORKDIR /app/frontend
+
+RUN bun install
+
+COPY ./frontend /app/frontend
+ARG VITE_API_URL
+
+RUN bun run build
+
+
+# Stage 1, based on Nginx, to have only the compiled app, ready for production with Nginx
+FROM nginx:1
+
+COPY --from=build-stage /app/frontend/dist/ /usr/share/nginx/html
+
+COPY ./frontend/nginx.conf /etc/nginx/conf.d/default.conf
+COPY ./frontend/nginx-backend-not-found.conf /etc/nginx/extra-conf.d/backend-not-found.conf
diff --git a/frontend/Dockerfile.playwright b/frontend/Dockerfile.playwright
new file mode 100644
index 0000000000..0ae955c7e8
--- /dev/null
+++ b/frontend/Dockerfile.playwright
@@ -0,0 +1,21 @@
+FROM mcr.microsoft.com/playwright:v1.58.0-noble
+
+WORKDIR /app
+
+RUN apt-get update && apt-get install -y unzip \
+ && rm -rf /var/lib/apt/lists/*
+
+RUN curl -fsSL https://bun.sh/install | bash
+ENV PATH="/root/.bun/bin:$PATH"
+
+COPY package.json bun.lock /app/
+
+COPY frontend/package.json /app/frontend/
+
+WORKDIR /app/frontend
+
+RUN bun install
+
+COPY ./frontend /app/frontend
+
+ARG VITE_API_URL
diff --git a/frontend/README.md b/frontend/README.md
new file mode 100644
index 0000000000..7b50d58b3f
--- /dev/null
+++ b/frontend/README.md
@@ -0,0 +1,121 @@
+# FastAPI Project - Frontend
+
+The frontend is built with [Vite](https://vitejs.dev/), [React](https://reactjs.org/), [TypeScript](https://www.typescriptlang.org/), [TanStack Query](https://tanstack.com/query), [TanStack Router](https://tanstack.com/router) and [Tailwind CSS](https://tailwindcss.com/).
+
+## Requirements
+
+- [Bun](https://bun.sh/) (recommended) or [Node.js](https://nodejs.org/)
+
+## Quick Start
+
+```bash
+bun install
+bun run dev
+```
+
+* Then open your browser at http://localhost:5173/.
+
+Notice that this live server is not running inside Docker; it's for local development, and that is the recommended workflow. Once you are happy with your frontend, you can build the frontend Docker image and start it, to test it in a production-like environment. But building the image at every change will not be as productive as running the local development server with live reload.
+
+Check the file `package.json` to see other available options.
+
+### Removing the frontend
+
+If you are developing an API-only app and want to remove the frontend, you can do it easily:
+
+* Remove the `./frontend` directory.
+
+* In the `compose.yml` file, remove the whole service / section `frontend`.
+
+* In the `compose.override.yml` file, remove the whole service / section `frontend` and `playwright`.
+
+Done, you have a frontend-less (API-only) app.
+
+---
+
+If you want, you can also remove the `FRONTEND` environment variables from:
+
+* `.env`
+* `./scripts/*.sh`
+
+But that would only be to clean them up; leaving them won't really have any effect either way.
+
+## Generate Client
+
+### Automatically
+
+* Activate the backend virtual environment.
+* From the top level project directory, run the script:
+
+```bash
+bash ./scripts/generate-client.sh
+```
+
+* Commit the changes.
+
+### Manually
+
+* Start the Docker Compose stack.
+
+* Download the OpenAPI JSON file from `http://localhost:8000/api/v1/openapi.json` and copy it to a new file `openapi.json` at the root of the `frontend` directory (see the `curl` sketch after this list).
+
+* To generate the frontend client, run:
+
+```bash
+bun run generate-client
+```
+
+* Commit the changes.
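+
+For the download step above you could use `curl`, for example (a sketch; run it from the `frontend` directory with the stack running):
+
+```bash
+curl -o openapi.json http://localhost:8000/api/v1/openapi.json
+```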
+
+Notice that every time the backend changes (changing the OpenAPI schema), you should follow these steps again to update the frontend client.
+
+## Using a Remote API
+
+If you want to use a remote API, you can set the environment variable `VITE_API_URL` to the URL of the remote API. For example, you can set it in the `frontend/.env` file:
+
+```env
+VITE_API_URL=https://api.my-domain.example.com
+```
+
+Then, when you run the frontend, it will use that URL as the base URL for the API.
+
+## Code Structure
+
+The frontend code is structured as follows:
+
+* `frontend/src` - The main frontend code.
+* `frontend/src/assets` - Static assets.
+* `frontend/src/client` - The generated OpenAPI client.
+* `frontend/src/components` - The different components of the frontend.
+* `frontend/src/hooks` - Custom hooks.
+* `frontend/src/routes` - The different routes of the frontend which include the pages.
+
+## End-to-End Testing with Playwright
+
+The frontend includes initial end-to-end tests using Playwright. To run the tests, you need to have the Docker Compose stack running. Start the stack with the following command:
+
+```bash
+docker compose up -d --wait backend
+```
+
+Then, you can run the tests with the following command:
+
+```bash
+bunx playwright test
+```
+
+You can also run your tests in UI mode to see the browser and interact with it by running:
+
+```bash
+bunx playwright test --ui
+```
+
+To stop and remove the Docker Compose stack and clean the data created in tests, use the following command:
+
+```bash
+docker compose down -v
+```
+
+To update the tests, navigate to the tests directory and modify the existing test files or add new ones as needed.
+
+For more information on writing and running Playwright tests, refer to the official [Playwright documentation](https://playwright.dev/docs/intro).
diff --git a/frontend/biome.json b/frontend/biome.json
new file mode 100644
index 0000000000..10b20a244a
--- /dev/null
+++ b/frontend/biome.json
@@ -0,0 +1,46 @@
+{
+ "$schema": "https://biomejs.dev/schemas/2.3.12/schema.json",
+ "assist": { "actions": { "source": { "organizeImports": "on" } } },
+ "files": {
+ "includes": [
+ "**",
+ "!**/dist/**/*",
+ "!**/node_modules/**/*",
+ "!**/src/routeTree.gen.ts",
+ "!**/src/client/**/*",
+ "!**/src/components/ui/**/*",
+ "!**/playwright-report",
+ "!**/playwright.config.ts"
+ ]
+ },
+ "linter": {
+ "enabled": true,
+ "rules": {
+ "recommended": true,
+ "suspicious": {
+ "noExplicitAny": "off",
+ "noArrayIndexKey": "off"
+ },
+ "style": {
+ "noNonNullAssertion": "off",
+ "noParameterAssign": "error",
+ "useSelfClosingElements": "error",
+ "noUselessElse": "error"
+ }
+ }
+ },
+ "formatter": {
+ "indentStyle": "space"
+ },
+ "javascript": {
+ "formatter": {
+ "quoteStyle": "double",
+ "semicolons": "asNeeded"
+ }
+ },
+ "css": {
+ "parser": {
+ "tailwindDirectives": true
+ }
+ }
+}
diff --git a/frontend/components.json b/frontend/components.json
new file mode 100644
index 0000000000..2b0833f097
--- /dev/null
+++ b/frontend/components.json
@@ -0,0 +1,22 @@
+{
+ "$schema": "https://ui.shadcn.com/schema.json",
+ "style": "new-york",
+ "rsc": false,
+ "tsx": true,
+ "tailwind": {
+ "config": "",
+ "css": "src/index.css",
+ "baseColor": "neutral",
+ "cssVariables": true,
+ "prefix": ""
+ },
+ "iconLibrary": "lucide",
+ "aliases": {
+ "components": "@/components",
+ "utils": "@/lib/utils",
+ "ui": "@/components/ui",
+ "lib": "@/lib",
+ "hooks": "@/hooks"
+ },
+ "registries": {}
+}
diff --git a/frontend/index.html b/frontend/index.html
new file mode 100644
index 0000000000..57621a268b
--- /dev/null
+++ b/frontend/index.html
@@ -0,0 +1,14 @@
+
+
+
+
+
+
+ Full Stack FastAPI Project
+
+
+
+
+
+
+
diff --git a/{{cookiecutter.project_slug}}/frontend/nginx-backend-not-found.conf b/frontend/nginx-backend-not-found.conf
similarity index 100%
rename from {{cookiecutter.project_slug}}/frontend/nginx-backend-not-found.conf
rename to frontend/nginx-backend-not-found.conf
diff --git a/frontend/nginx.conf b/frontend/nginx.conf
new file mode 100644
index 0000000000..ba4d9aad6c
--- /dev/null
+++ b/frontend/nginx.conf
@@ -0,0 +1,11 @@
+server {
+ listen 80;
+
+ location / {
+ root /usr/share/nginx/html;
+ index index.html index.htm;
+ try_files $uri /index.html =404;
+ }
+
+ include /etc/nginx/extra-conf.d/*.conf;
+}
diff --git a/frontend/openapi-ts.config.ts b/frontend/openapi-ts.config.ts
new file mode 100644
index 0000000000..b5a69e20eb
--- /dev/null
+++ b/frontend/openapi-ts.config.ts
@@ -0,0 +1,33 @@
+import { defineConfig } from "@hey-api/openapi-ts"
+
+export default defineConfig({
+ input: "./openapi.json",
+ output: "./src/client",
+
+ plugins: [
+ "legacy/axios",
+ {
+ name: "@hey-api/sdk",
+ // NOTE: this doesn't allow tree-shaking
+ asClass: true,
+ operationId: true,
+ classNameBuilder: "{{name}}Service",
+ methodNameBuilder: (operation) => {
+ // @ts-expect-error
+ let name: string = operation.name
+ // @ts-expect-error
+ const service: string = operation.service
+
+ if (service && name.toLowerCase().startsWith(service.toLowerCase())) {
+ name = name.slice(service.length)
+ }
+
+ return name.charAt(0).toLowerCase() + name.slice(1)
+ },
+ },
+ {
+ name: "@hey-api/schemas",
+ type: "json",
+ },
+ ],
+})
diff --git a/frontend/package.json b/frontend/package.json
new file mode 100644
index 0000000000..0040e7ff03
--- /dev/null
+++ b/frontend/package.json
@@ -0,0 +1,66 @@
+{
+ "name": "frontend",
+ "private": true,
+ "version": "0.0.0",
+ "type": "module",
+ "scripts": {
+ "dev": "vite",
+ "build": "tsc -p tsconfig.build.json && vite build",
+ "lint": "biome check --write --unsafe --no-errors-on-unmatched --files-ignore-unknown=true ./",
+ "preview": "vite preview",
+ "generate-client": "openapi-ts",
+ "test": "bunx playwright test",
+ "test:ui": "bunx playwright test --ui"
+ },
+ "dependencies": {
+ "@hookform/resolvers": "^5.2.2",
+ "@radix-ui/react-avatar": "^1.1.11",
+ "@radix-ui/react-checkbox": "^1.3.3",
+ "@radix-ui/react-dialog": "^1.1.15",
+ "@radix-ui/react-dropdown-menu": "^2.1.16",
+ "@radix-ui/react-label": "^2.1.8",
+ "@radix-ui/react-radio-group": "^1.3.8",
+ "@radix-ui/react-scroll-area": "^1.2.10",
+ "@radix-ui/react-select": "^2.2.6",
+ "@radix-ui/react-separator": "^1.1.8",
+ "@radix-ui/react-slot": "^1.2.4",
+ "@radix-ui/react-tabs": "^1.1.13",
+ "@radix-ui/react-tooltip": "^1.2.8",
+ "@tailwindcss/vite": "^4.1.18",
+ "@tanstack/react-query": "^5.90.12",
+ "@tanstack/react-query-devtools": "^5.91.1",
+ "@tanstack/react-router": "^1.157.3",
+ "@tanstack/react-router-devtools": "^1.156.0",
+ "@tanstack/react-table": "^8.21.3",
+ "axios": "1.13.4",
+ "class-variance-authority": "^0.7.1",
+ "clsx": "^2.1.1",
+ "form-data": "4.0.5",
+ "lucide-react": "^0.562.0",
+ "next-themes": "^0.4.6",
+ "react": "^19.1.1",
+ "react-dom": "^19.2.3",
+ "react-error-boundary": "^6.0.0",
+ "react-hook-form": "^7.68.0",
+ "react-icons": "^5.5.0",
+ "sonner": "^2.0.7",
+ "tailwind-merge": "^3.4.0",
+ "tailwindcss": "^4.1.17",
+ "zod": "^4.3.6"
+ },
+ "devDependencies": {
+ "@biomejs/biome": "^2.3.12",
+ "@hey-api/openapi-ts": "0.73.0",
+ "@playwright/test": "1.58.0",
+ "@tanstack/router-devtools": "^1.157.17",
+ "@tanstack/router-plugin": "^1.140.0",
+ "@types/node": "^25.1.0",
+ "@types/react": "^19.2.7",
+ "@types/react-dom": "^19.2.3",
+ "@vitejs/plugin-react-swc": "^4.2.2",
+ "dotenv": "^17.2.3",
+ "tw-animate-css": "^1.4.0",
+ "typescript": "^5.9.3",
+ "vite": "^7.3.0"
+ }
+}
diff --git a/frontend/playwright.config.ts b/frontend/playwright.config.ts
new file mode 100644
index 0000000000..36f03d9919
--- /dev/null
+++ b/frontend/playwright.config.ts
@@ -0,0 +1,91 @@
+import { defineConfig, devices } from '@playwright/test';
+import 'dotenv/config'
+
+/**
+ * Read environment variables from file.
+ * https://github.com/motdotla/dotenv
+ */
+
+/**
+ * See https://playwright.dev/docs/test-configuration.
+ */
+export default defineConfig({
+ testDir: './tests',
+ /* Run tests in files in parallel */
+ fullyParallel: true,
+ /* Fail the build on CI if you accidentally left test.only in the source code. */
+ forbidOnly: !!process.env.CI,
+ /* Retry on CI only */
+ retries: process.env.CI ? 2 : 0,
+ /* Opt out of parallel tests on CI. */
+ workers: process.env.CI ? 1 : undefined,
+ /* Reporter to use. See https://playwright.dev/docs/test-reporters */
+ reporter: process.env.CI ? 'blob' : 'html',
+ /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */
+ use: {
+ /* Base URL to use in actions like `await page.goto('/')`. */
+ baseURL: 'http://localhost:5173',
+
+ /* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */
+ trace: 'on-first-retry',
+ },
+
+ /* Configure projects for major browsers */
+ projects: [
+ { name: 'setup', testMatch: /.*\.setup\.ts/ },
+
+ {
+ name: 'chromium',
+ use: {
+ ...devices['Desktop Chrome'],
+ storageState: 'playwright/.auth/user.json',
+ },
+ dependencies: ['setup'],
+ },
+
+ // {
+ // name: 'firefox',
+ // use: {
+ // ...devices['Desktop Firefox'],
+ // storageState: 'playwright/.auth/user.json',
+ // },
+ // dependencies: ['setup'],
+ // },
+
+ // {
+ // name: 'webkit',
+ // use: {
+ // ...devices['Desktop Safari'],
+ // storageState: 'playwright/.auth/user.json',
+ // },
+ // dependencies: ['setup'],
+ // },
+
+ /* Test against mobile viewports. */
+ // {
+ // name: 'Mobile Chrome',
+ // use: { ...devices['Pixel 5'] },
+ // },
+ // {
+ // name: 'Mobile Safari',
+ // use: { ...devices['iPhone 12'] },
+ // },
+
+ /* Test against branded browsers. */
+ // {
+ // name: 'Microsoft Edge',
+ // use: { ...devices['Desktop Edge'], channel: 'msedge' },
+ // },
+ // {
+ // name: 'Google Chrome',
+ // use: { ...devices['Desktop Chrome'], channel: 'chrome' },
+ // },
+ ],
+
+ /* Run your local dev server before starting the tests */
+ webServer: {
+ command: 'bun run dev',
+ url: 'http://localhost:5173',
+ reuseExistingServer: !process.env.CI,
+ },
+});
diff --git a/frontend/public/assets/images/fastapi-icon-light.svg b/frontend/public/assets/images/fastapi-icon-light.svg
new file mode 100644
index 0000000000..d069c7247c
--- /dev/null
+++ b/frontend/public/assets/images/fastapi-icon-light.svg
@@ -0,0 +1,77 @@
+
+
diff --git a/frontend/public/assets/images/fastapi-icon.svg b/frontend/public/assets/images/fastapi-icon.svg
new file mode 100644
index 0000000000..df93a70260
--- /dev/null
+++ b/frontend/public/assets/images/fastapi-icon.svg
@@ -0,0 +1,77 @@
+
+
diff --git a/frontend/public/assets/images/fastapi-logo-light.svg b/frontend/public/assets/images/fastapi-logo-light.svg
new file mode 100644
index 0000000000..1a84b986ea
--- /dev/null
+++ b/frontend/public/assets/images/fastapi-logo-light.svg
@@ -0,0 +1,83 @@
+
+
diff --git a/frontend/public/assets/images/fastapi-logo.svg b/frontend/public/assets/images/fastapi-logo.svg
new file mode 100644
index 0000000000..c90d25232f
--- /dev/null
+++ b/frontend/public/assets/images/fastapi-logo.svg
@@ -0,0 +1,91 @@
+
+
diff --git a/frontend/public/assets/images/favicon.png b/frontend/public/assets/images/favicon.png
new file mode 100644
index 0000000000..e5b7c3ada7
Binary files /dev/null and b/frontend/public/assets/images/favicon.png differ
diff --git a/frontend/src/client/core/ApiError.ts b/frontend/src/client/core/ApiError.ts
new file mode 100644
index 0000000000..36675d288a
--- /dev/null
+++ b/frontend/src/client/core/ApiError.ts
@@ -0,0 +1,21 @@
+import type { ApiRequestOptions } from './ApiRequestOptions';
+import type { ApiResult } from './ApiResult';
+
+export class ApiError extends Error {
+ public readonly url: string;
+ public readonly status: number;
+ public readonly statusText: string;
+ public readonly body: unknown;
+ public readonly request: ApiRequestOptions;
+
+ constructor(request: ApiRequestOptions, response: ApiResult, message: string) {
+ super(message);
+
+ this.name = 'ApiError';
+ this.url = response.url;
+ this.status = response.status;
+ this.statusText = response.statusText;
+ this.body = response.body;
+ this.request = request;
+ }
+}
\ No newline at end of file
diff --git a/frontend/src/client/core/ApiRequestOptions.ts b/frontend/src/client/core/ApiRequestOptions.ts
new file mode 100644
index 0000000000..939a0aa4c8
--- /dev/null
+++ b/frontend/src/client/core/ApiRequestOptions.ts
@@ -0,0 +1,21 @@
+export type ApiRequestOptions = {
+ readonly body?: any;
+ readonly cookies?: Record<string, unknown>;
+ readonly errors?: Record<number, string>;
+ readonly formData?: Record<string, unknown> | any[] | Blob | File;
+ readonly headers?: Record<string, unknown>;
+ readonly mediaType?: string;
+ readonly method:
+ | 'DELETE'
+ | 'GET'
+ | 'HEAD'
+ | 'OPTIONS'
+ | 'PATCH'
+ | 'POST'
+ | 'PUT';
+ readonly path?: Record<string, unknown>;
+ readonly query?: Record<string, unknown>;
+ readonly responseHeader?: string;
+ readonly responseTransformer?: (data: unknown) => Promise<unknown>;
+ readonly url: string;
+};
\ No newline at end of file
diff --git a/frontend/src/client/core/ApiResult.ts b/frontend/src/client/core/ApiResult.ts
new file mode 100644
index 0000000000..4c58e39138
--- /dev/null
+++ b/frontend/src/client/core/ApiResult.ts
@@ -0,0 +1,7 @@
+export type ApiResult<TData = any> = {
+ readonly body: TData;
+ readonly ok: boolean;
+ readonly status: number;
+ readonly statusText: string;
+ readonly url: string;
+};
\ No newline at end of file
diff --git a/frontend/src/client/core/CancelablePromise.ts b/frontend/src/client/core/CancelablePromise.ts
new file mode 100644
index 0000000000..ccc082e8f2
--- /dev/null
+++ b/frontend/src/client/core/CancelablePromise.ts
@@ -0,0 +1,126 @@
+export class CancelError extends Error {
+ constructor(message: string) {
+ super(message);
+ this.name = 'CancelError';
+ }
+
+ public get isCancelled(): boolean {
+ return true;
+ }
+}
+
+export interface OnCancel {
+ readonly isResolved: boolean;
+ readonly isRejected: boolean;
+ readonly isCancelled: boolean;
+
+ (cancelHandler: () => void): void;
+}
+
+export class CancelablePromise<T> implements Promise<T> {
+ private _isResolved: boolean;
+ private _isRejected: boolean;
+ private _isCancelled: boolean;
+ readonly cancelHandlers: (() => void)[];
+ readonly promise: Promise<T>;
+ private _resolve?: (value: T | PromiseLike<T>) => void;
+ private _reject?: (reason?: unknown) => void;
+
+ constructor(
+ executor: (
+ resolve: (value: T | PromiseLike<T>) => void,
+ reject: (reason?: unknown) => void,
+ onCancel: OnCancel
+ ) => void
+ ) {
+ this._isResolved = false;
+ this._isRejected = false;
+ this._isCancelled = false;
+ this.cancelHandlers = [];
+ this.promise = new Promise<T>((resolve, reject) => {
+ this._resolve = resolve;
+ this._reject = reject;
+
+ const onResolve = (value: T | PromiseLike<T>): void => {
+ if (this._isResolved || this._isRejected || this._isCancelled) {
+ return;
+ }
+ this._isResolved = true;
+ if (this._resolve) this._resolve(value);
+ };
+
+ const onReject = (reason?: unknown): void => {
+ if (this._isResolved || this._isRejected || this._isCancelled) {
+ return;
+ }
+ this._isRejected = true;
+ if (this._reject) this._reject(reason);
+ };
+
+ const onCancel = (cancelHandler: () => void): void => {
+ if (this._isResolved || this._isRejected || this._isCancelled) {
+ return;
+ }
+ this.cancelHandlers.push(cancelHandler);
+ };
+
+ Object.defineProperty(onCancel, 'isResolved', {
+ get: (): boolean => this._isResolved,
+ });
+
+ Object.defineProperty(onCancel, 'isRejected', {
+ get: (): boolean => this._isRejected,
+ });
+
+ Object.defineProperty(onCancel, 'isCancelled', {
+ get: (): boolean => this._isCancelled,
+ });
+
+ return executor(onResolve, onReject, onCancel as OnCancel);
+ });
+ }
+
+ get [Symbol.toStringTag]() {
+ return "Cancellable Promise";
+ }
+
+ public then<TResult1 = T, TResult2 = never>(
+ onFulfilled?: ((value: T) => TResult1 | PromiseLike<TResult1>) | null,
+ onRejected?: ((reason: unknown) => TResult2 | PromiseLike<TResult2>) | null
+ ): Promise<TResult1 | TResult2> {
+ return this.promise.then(onFulfilled, onRejected);
+ }
+
+ public catch<TResult = never>(
+ onRejected?: ((reason: unknown) => TResult | PromiseLike<TResult>) | null
+ ): Promise<T | TResult> {
+ return this.promise.catch(onRejected);
+ }
+
+ public finally(onFinally?: (() => void) | null): Promise<T> {
+ return this.promise.finally(onFinally);
+ }
+
+ public cancel(): void {
+ if (this._isResolved || this._isRejected || this._isCancelled) {
+ return;
+ }
+ this._isCancelled = true;
+ if (this.cancelHandlers.length) {
+ try {
+ for (const cancelHandler of this.cancelHandlers) {
+ cancelHandler();
+ }
+ } catch (error) {
+ console.warn('Cancellation threw an error', error);
+ return;
+ }
+ }
+ this.cancelHandlers.length = 0;
+ if (this._reject) this._reject(new CancelError('Request aborted'));
+ }
+
+ public get isCancelled(): boolean {
+ return this._isCancelled;
+ }
+}
\ No newline at end of file
diff --git a/frontend/src/client/core/OpenAPI.ts b/frontend/src/client/core/OpenAPI.ts
new file mode 100644
index 0000000000..74f92b4085
--- /dev/null
+++ b/frontend/src/client/core/OpenAPI.ts
@@ -0,0 +1,57 @@
+import type { AxiosRequestConfig, AxiosResponse } from 'axios';
+import type { ApiRequestOptions } from './ApiRequestOptions';
+
+type Headers = Record<string, string>;
+type Middleware<T> = (value: T) => T | Promise<T>;
+type Resolver<T> = (options: ApiRequestOptions) => Promise<T>;
+
+export class Interceptors<T> {
+ _fns: Middleware<T>[];
+
+ constructor() {
+ this._fns = [];
+ }
+
+ eject(fn: Middleware<T>): void {
+ const index = this._fns.indexOf(fn);
+ if (index !== -1) {
+ this._fns = [...this._fns.slice(0, index), ...this._fns.slice(index + 1)];
+ }
+ }
+
+ use(fn: Middleware<T>): void {
+ this._fns = [...this._fns, fn];
+ }
+}
+
+export type OpenAPIConfig = {
+ BASE: string;
+ CREDENTIALS: 'include' | 'omit' | 'same-origin';
+ ENCODE_PATH?: ((path: string) => string) | undefined;
+ HEADERS?: Headers | Resolver<Headers> | undefined;
+ PASSWORD?: string | Resolver<string> | undefined;
+ TOKEN?: string | Resolver<string> | undefined;
+ USERNAME?: string | Resolver<string> | undefined;
+ VERSION: string;
+ WITH_CREDENTIALS: boolean;
+ interceptors: {
+ request: Interceptors<AxiosRequestConfig>;
+ response: Interceptors<AxiosResponse>;
+ };
+};
+
+export const OpenAPI: OpenAPIConfig = {
+ BASE: '',
+ CREDENTIALS: 'include',
+ ENCODE_PATH: undefined,
+ HEADERS: undefined,
+ PASSWORD: undefined,
+ TOKEN: undefined,
+ USERNAME: undefined,
+ VERSION: '0.1.0',
+ WITH_CREDENTIALS: false,
+ interceptors: {
+ request: new Interceptors(),
+ response: new Interceptors(),
+ },
+};
\ No newline at end of file
diff --git a/frontend/src/client/core/request.ts b/frontend/src/client/core/request.ts
new file mode 100644
index 0000000000..ecc2e393cd
--- /dev/null
+++ b/frontend/src/client/core/request.ts
@@ -0,0 +1,347 @@
+import axios from 'axios';
+import type { AxiosError, AxiosRequestConfig, AxiosResponse, AxiosInstance } from 'axios';
+
+import { ApiError } from './ApiError';
+import type { ApiRequestOptions } from './ApiRequestOptions';
+import type { ApiResult } from './ApiResult';
+import { CancelablePromise } from './CancelablePromise';
+import type { OnCancel } from './CancelablePromise';
+import type { OpenAPIConfig } from './OpenAPI';
+
+export const isString = (value: unknown): value is string => {
+ return typeof value === 'string';
+};
+
+export const isStringWithValue = (value: unknown): value is string => {
+ return isString(value) && value !== '';
+};
+
+export const isBlob = (value: any): value is Blob => {
+ return value instanceof Blob;
+};
+
+export const isFormData = (value: unknown): value is FormData => {
+ return value instanceof FormData;
+};
+
+export const isSuccess = (status: number): boolean => {
+ return status >= 200 && status < 300;
+};
+
+export const base64 = (str: string): string => {
+ try {
+ return btoa(str);
+ } catch (err) {
+ // @ts-ignore
+ return Buffer.from(str).toString('base64');
+ }
+};
+
+export const getQueryString = (params: Record<string, unknown>): string => {
+ const qs: string[] = [];
+
+ const append = (key: string, value: unknown) => {
+ qs.push(`${encodeURIComponent(key)}=${encodeURIComponent(String(value))}`);
+ };
+
+ const encodePair = (key: string, value: unknown) => {
+ if (value === undefined || value === null) {
+ return;
+ }
+
+ if (value instanceof Date) {
+ append(key, value.toISOString());
+ } else if (Array.isArray(value)) {
+ value.forEach(v => encodePair(key, v));
+ } else if (typeof value === 'object') {
+ Object.entries(value).forEach(([k, v]) => encodePair(`${key}[${k}]`, v));
+ } else {
+ append(key, value);
+ }
+ };
+
+ Object.entries(params).forEach(([key, value]) => encodePair(key, value));
+
+ return qs.length ? `?${qs.join('&')}` : '';
+};
+
+const getUrl = (config: OpenAPIConfig, options: ApiRequestOptions): string => {
+ const encoder = config.ENCODE_PATH || encodeURI;
+
+ const path = options.url
+ .replace('{api-version}', config.VERSION)
+ .replace(/{(.*?)}/g, (substring: string, group: string) => {
+ if (options.path?.hasOwnProperty(group)) {
+ return encoder(String(options.path[group]));
+ }
+ return substring;
+ });
+
+ const url = config.BASE + path;
+ return options.query ? url + getQueryString(options.query) : url;
+};
+
+export const getFormData = (options: ApiRequestOptions): FormData | undefined => {
+ if (options.formData) {
+ const formData = new FormData();
+
+ const process = (key: string, value: unknown) => {
+ if (isString(value) || isBlob(value)) {
+ formData.append(key, value);
+ } else {
+ formData.append(key, JSON.stringify(value));
+ }
+ };
+
+ Object.entries(options.formData)
+ .filter(([, value]) => value !== undefined && value !== null)
+ .forEach(([key, value]) => {
+ if (Array.isArray(value)) {
+ value.forEach(v => process(key, v));
+ } else {
+ process(key, value);
+ }
+ });
+
+ return formData;
+ }
+ return undefined;
+};
+
+type Resolver<T> = (options: ApiRequestOptions) => Promise<T>;
+
+export const resolve = async <T>(options: ApiRequestOptions, resolver?: T | Resolver<T>): Promise<T | undefined> => {
+ if (typeof resolver === 'function') {
+ return (resolver as Resolver<T>)(options);
+ }
+ return resolver;
+};
+
+export const getHeaders = async (config: OpenAPIConfig, options: ApiRequestOptions): Promise<Record<string, string>> => {
+ const [token, username, password, additionalHeaders] = await Promise.all([
+ // @ts-ignore
+ resolve(options, config.TOKEN),
+ // @ts-ignore
+ resolve(options, config.USERNAME),
+ // @ts-ignore
+ resolve(options, config.PASSWORD),
+ // @ts-ignore
+ resolve(options, config.HEADERS),
+ ]);
+
+ const headers = Object.entries({
+ Accept: 'application/json',
+ ...additionalHeaders,
+ ...options.headers,
+ })
+ .filter(([, value]) => value !== undefined && value !== null)
+ .reduce((headers, [key, value]) => ({
+ ...headers,
+ [key]: String(value),
+ }), {} as Record<string, string>);
+
+ if (isStringWithValue(token)) {
+ headers['Authorization'] = `Bearer ${token}`;
+ }
+
+ if (isStringWithValue(username) && isStringWithValue(password)) {
+ const credentials = base64(`${username}:${password}`);
+ headers['Authorization'] = `Basic ${credentials}`;
+ }
+
+ if (options.body !== undefined) {
+ if (options.mediaType) {
+ headers['Content-Type'] = options.mediaType;
+ } else if (isBlob(options.body)) {
+ headers['Content-Type'] = options.body.type || 'application/octet-stream';
+ } else if (isString(options.body)) {
+ headers['Content-Type'] = 'text/plain';
+ } else if (!isFormData(options.body)) {
+ headers['Content-Type'] = 'application/json';
+ }
+ } else if (options.formData !== undefined) {
+ if (options.mediaType) {
+ headers['Content-Type'] = options.mediaType;
+ }
+ }
+
+ return headers;
+};
+
+export const getRequestBody = (options: ApiRequestOptions): unknown => {
+ if (options.body) {
+ return options.body;
+ }
+ return undefined;
+};
+
+export const sendRequest = async <T>(
+ config: OpenAPIConfig,
+ options: ApiRequestOptions,
+ url: string,
+ body: unknown,
+ formData: FormData | undefined,
+ headers: Record<string, string>,
+ onCancel: OnCancel,
+ axiosClient: AxiosInstance
+): Promise<AxiosResponse<T>> => {
+ const controller = new AbortController();
+
+ let requestConfig: AxiosRequestConfig = {
+ data: body ?? formData,
+ headers,
+ method: options.method,
+ signal: controller.signal,
+ url,
+ withCredentials: config.WITH_CREDENTIALS,
+ };
+
+ onCancel(() => controller.abort());
+
+ for (const fn of config.interceptors.request._fns) {
+ requestConfig = await fn(requestConfig);
+ }
+
+ try {
+ return await axiosClient.request(requestConfig);
+ } catch (error) {
+ const axiosError = error as AxiosError<T>;
+ if (axiosError.response) {
+ return axiosError.response;
+ }
+ throw error;
+ }
+};
+
+export const getResponseHeader = (response: AxiosResponse<unknown>, responseHeader?: string): string | undefined => {
+ if (responseHeader) {
+ const content = response.headers[responseHeader];
+ if (isString(content)) {
+ return content;
+ }
+ }
+ return undefined;
+};
+
+export const getResponseBody = (response: AxiosResponse<unknown>): unknown => {
+ if (response.status !== 204) {
+ return response.data;
+ }
+ return undefined;
+};
+
+export const catchErrorCodes = (options: ApiRequestOptions, result: ApiResult): void => {
+ const errors: Record<number, string> = {
+ 400: 'Bad Request',
+ 401: 'Unauthorized',
+ 402: 'Payment Required',
+ 403: 'Forbidden',
+ 404: 'Not Found',
+ 405: 'Method Not Allowed',
+ 406: 'Not Acceptable',
+ 407: 'Proxy Authentication Required',
+ 408: 'Request Timeout',
+ 409: 'Conflict',
+ 410: 'Gone',
+ 411: 'Length Required',
+ 412: 'Precondition Failed',
+ 413: 'Payload Too Large',
+ 414: 'URI Too Long',
+ 415: 'Unsupported Media Type',
+ 416: 'Range Not Satisfiable',
+ 417: 'Expectation Failed',
+ 418: 'Im a teapot',
+ 421: 'Misdirected Request',
+ 422: 'Unprocessable Content',
+ 423: 'Locked',
+ 424: 'Failed Dependency',
+ 425: 'Too Early',
+ 426: 'Upgrade Required',
+ 428: 'Precondition Required',
+ 429: 'Too Many Requests',
+ 431: 'Request Header Fields Too Large',
+ 451: 'Unavailable For Legal Reasons',
+ 500: 'Internal Server Error',
+ 501: 'Not Implemented',
+ 502: 'Bad Gateway',
+ 503: 'Service Unavailable',
+ 504: 'Gateway Timeout',
+ 505: 'HTTP Version Not Supported',
+ 506: 'Variant Also Negotiates',
+ 507: 'Insufficient Storage',
+ 508: 'Loop Detected',
+ 510: 'Not Extended',
+ 511: 'Network Authentication Required',
+ ...options.errors,
+ }
+
+ const error = errors[result.status];
+ if (error) {
+ throw new ApiError(options, result, error);
+ }
+
+ if (!result.ok) {
+ const errorStatus = result.status ?? 'unknown';
+ const errorStatusText = result.statusText ?? 'unknown';
+ const errorBody = (() => {
+ try {
+ return JSON.stringify(result.body, null, 2);
+ } catch (e) {
+ return undefined;
+ }
+ })();
+
+ throw new ApiError(options, result,
+ `Generic Error: status: ${errorStatus}; status text: ${errorStatusText}; body: ${errorBody}`
+ );
+ }
+};
+
+/**
+ * Request method
+ * @param config The OpenAPI configuration object
+ * @param options The request options from the service
+ * @param axiosClient The axios client instance to use
+ * @returns CancelablePromise
+ * @throws ApiError
+ */
+export const request = <T>(config: OpenAPIConfig, options: ApiRequestOptions, axiosClient: AxiosInstance = axios): CancelablePromise<T> => {
+ return new CancelablePromise(async (resolve, reject, onCancel) => {
+ try {
+ const url = getUrl(config, options);
+ const formData = getFormData(options);
+ const body = getRequestBody(options);
+ const headers = await getHeaders(config, options);
+
+ if (!onCancel.isCancelled) {
+ let response = await sendRequest<T>(config, options, url, body, formData, headers, onCancel, axiosClient);
+
+ for (const fn of config.interceptors.response._fns) {
+ response = await fn(response);
+ }
+
+ const responseBody = getResponseBody(response);
+ const responseHeader = getResponseHeader(response, options.responseHeader);
+
+ let transformedBody = responseBody;
+ if (options.responseTransformer && isSuccess(response.status)) {
+ transformedBody = await options.responseTransformer(responseBody)
+ }
+
+ const result: ApiResult = {
+ url,
+ ok: isSuccess(response.status),
+ status: response.status,
+ statusText: response.statusText,
+ body: responseHeader ?? transformedBody,
+ };
+
+ catchErrorCodes(options, result);
+
+ resolve(result.body);
+ }
+ } catch (error) {
+ reject(error);
+ }
+ });
+};
\ No newline at end of file
diff --git a/frontend/src/client/index.ts b/frontend/src/client/index.ts
new file mode 100644
index 0000000000..50a1dd734c
--- /dev/null
+++ b/frontend/src/client/index.ts
@@ -0,0 +1,6 @@
+// This file is auto-generated by @hey-api/openapi-ts
+export { ApiError } from './core/ApiError';
+export { CancelablePromise, CancelError } from './core/CancelablePromise';
+export { OpenAPI, type OpenAPIConfig } from './core/OpenAPI';
+export * from './sdk.gen';
+export * from './types.gen';
\ No newline at end of file
diff --git a/frontend/src/client/schemas.gen.ts b/frontend/src/client/schemas.gen.ts
new file mode 100644
index 0000000000..5c0c9c4a4e
--- /dev/null
+++ b/frontend/src/client/schemas.gen.ts
@@ -0,0 +1,552 @@
+// This file is auto-generated by @hey-api/openapi-ts
+
+export const Body_login_login_access_tokenSchema = {
+ properties: {
+ grant_type: {
+ anyOf: [
+ {
+ type: 'string',
+ pattern: '^password$'
+ },
+ {
+ type: 'null'
+ }
+ ],
+ title: 'Grant Type'
+ },
+ username: {
+ type: 'string',
+ title: 'Username'
+ },
+ password: {
+ type: 'string',
+ format: 'password',
+ title: 'Password'
+ },
+ scope: {
+ type: 'string',
+ title: 'Scope',
+ default: ''
+ },
+ client_id: {
+ anyOf: [
+ {
+ type: 'string'
+ },
+ {
+ type: 'null'
+ }
+ ],
+ title: 'Client Id'
+ },
+ client_secret: {
+ anyOf: [
+ {
+ type: 'string'
+ },
+ {
+ type: 'null'
+ }
+ ],
+ format: 'password',
+ title: 'Client Secret'
+ }
+ },
+ type: 'object',
+ required: ['username', 'password'],
+ title: 'Body_login-login_access_token'
+} as const;
+
+export const HTTPValidationErrorSchema = {
+ properties: {
+ detail: {
+ items: {
+ '$ref': '#/components/schemas/ValidationError'
+ },
+ type: 'array',
+ title: 'Detail'
+ }
+ },
+ type: 'object',
+ title: 'HTTPValidationError'
+} as const;
+
+export const ItemCreateSchema = {
+ properties: {
+ title: {
+ type: 'string',
+ maxLength: 255,
+ minLength: 1,
+ title: 'Title'
+ },
+ description: {
+ anyOf: [
+ {
+ type: 'string',
+ maxLength: 255
+ },
+ {
+ type: 'null'
+ }
+ ],
+ title: 'Description'
+ }
+ },
+ type: 'object',
+ required: ['title'],
+ title: 'ItemCreate'
+} as const;
+
+export const ItemPublicSchema = {
+ properties: {
+ title: {
+ type: 'string',
+ maxLength: 255,
+ minLength: 1,
+ title: 'Title'
+ },
+ description: {
+ anyOf: [
+ {
+ type: 'string',
+ maxLength: 255
+ },
+ {
+ type: 'null'
+ }
+ ],
+ title: 'Description'
+ },
+ id: {
+ type: 'string',
+ format: 'uuid',
+ title: 'Id'
+ },
+ owner_id: {
+ type: 'string',
+ format: 'uuid',
+ title: 'Owner Id'
+ },
+ created_at: {
+ anyOf: [
+ {
+ type: 'string',
+ format: 'date-time'
+ },
+ {
+ type: 'null'
+ }
+ ],
+ title: 'Created At'
+ }
+ },
+ type: 'object',
+ required: ['title', 'id', 'owner_id'],
+ title: 'ItemPublic'
+} as const;
+
+export const ItemUpdateSchema = {
+ properties: {
+ title: {
+ anyOf: [
+ {
+ type: 'string',
+ maxLength: 255,
+ minLength: 1
+ },
+ {
+ type: 'null'
+ }
+ ],
+ title: 'Title'
+ },
+ description: {
+ anyOf: [
+ {
+ type: 'string',
+ maxLength: 255
+ },
+ {
+ type: 'null'
+ }
+ ],
+ title: 'Description'
+ }
+ },
+ type: 'object',
+ title: 'ItemUpdate'
+} as const;
+
+export const ItemsPublicSchema = {
+ properties: {
+ data: {
+ items: {
+ '$ref': '#/components/schemas/ItemPublic'
+ },
+ type: 'array',
+ title: 'Data'
+ },
+ count: {
+ type: 'integer',
+ title: 'Count'
+ }
+ },
+ type: 'object',
+ required: ['data', 'count'],
+ title: 'ItemsPublic'
+} as const;
+
+export const MessageSchema = {
+ properties: {
+ message: {
+ type: 'string',
+ title: 'Message'
+ }
+ },
+ type: 'object',
+ required: ['message'],
+ title: 'Message'
+} as const;
+
+export const NewPasswordSchema = {
+ properties: {
+ token: {
+ type: 'string',
+ title: 'Token'
+ },
+ new_password: {
+ type: 'string',
+ maxLength: 128,
+ minLength: 8,
+ title: 'New Password'
+ }
+ },
+ type: 'object',
+ required: ['token', 'new_password'],
+ title: 'NewPassword'
+} as const;
+
+export const PrivateUserCreateSchema = {
+ properties: {
+ email: {
+ type: 'string',
+ title: 'Email'
+ },
+ password: {
+ type: 'string',
+ title: 'Password'
+ },
+ full_name: {
+ type: 'string',
+ title: 'Full Name'
+ },
+ is_verified: {
+ type: 'boolean',
+ title: 'Is Verified',
+ default: false
+ }
+ },
+ type: 'object',
+ required: ['email', 'password', 'full_name'],
+ title: 'PrivateUserCreate'
+} as const;
+
+export const TokenSchema = {
+ properties: {
+ access_token: {
+ type: 'string',
+ title: 'Access Token'
+ },
+ token_type: {
+ type: 'string',
+ title: 'Token Type',
+ default: 'bearer'
+ }
+ },
+ type: 'object',
+ required: ['access_token'],
+ title: 'Token'
+} as const;
+
+export const UpdatePasswordSchema = {
+ properties: {
+ current_password: {
+ type: 'string',
+ maxLength: 128,
+ minLength: 8,
+ title: 'Current Password'
+ },
+ new_password: {
+ type: 'string',
+ maxLength: 128,
+ minLength: 8,
+ title: 'New Password'
+ }
+ },
+ type: 'object',
+ required: ['current_password', 'new_password'],
+ title: 'UpdatePassword'
+} as const;
+
+export const UserCreateSchema = {
+ properties: {
+ email: {
+ type: 'string',
+ maxLength: 255,
+ format: 'email',
+ title: 'Email'
+ },
+ is_active: {
+ type: 'boolean',
+ title: 'Is Active',
+ default: true
+ },
+ is_superuser: {
+ type: 'boolean',
+ title: 'Is Superuser',
+ default: false
+ },
+ full_name: {
+ anyOf: [
+ {
+ type: 'string',
+ maxLength: 255
+ },
+ {
+ type: 'null'
+ }
+ ],
+ title: 'Full Name'
+ },
+ password: {
+ type: 'string',
+ maxLength: 128,
+ minLength: 8,
+ title: 'Password'
+ }
+ },
+ type: 'object',
+ required: ['email', 'password'],
+ title: 'UserCreate'
+} as const;
+
+export const UserPublicSchema = {
+ properties: {
+ email: {
+ type: 'string',
+ maxLength: 255,
+ format: 'email',
+ title: 'Email'
+ },
+ is_active: {
+ type: 'boolean',
+ title: 'Is Active',
+ default: true
+ },
+ is_superuser: {
+ type: 'boolean',
+ title: 'Is Superuser',
+ default: false
+ },
+ full_name: {
+ anyOf: [
+ {
+ type: 'string',
+ maxLength: 255
+ },
+ {
+ type: 'null'
+ }
+ ],
+ title: 'Full Name'
+ },
+ id: {
+ type: 'string',
+ format: 'uuid',
+ title: 'Id'
+ },
+ created_at: {
+ anyOf: [
+ {
+ type: 'string',
+ format: 'date-time'
+ },
+ {
+ type: 'null'
+ }
+ ],
+ title: 'Created At'
+ }
+ },
+ type: 'object',
+ required: ['email', 'id'],
+ title: 'UserPublic'
+} as const;
+
+export const UserRegisterSchema = {
+ properties: {
+ email: {
+ type: 'string',
+ maxLength: 255,
+ format: 'email',
+ title: 'Email'
+ },
+ password: {
+ type: 'string',
+ maxLength: 128,
+ minLength: 8,
+ title: 'Password'
+ },
+ full_name: {
+ anyOf: [
+ {
+ type: 'string',
+ maxLength: 255
+ },
+ {
+ type: 'null'
+ }
+ ],
+ title: 'Full Name'
+ }
+ },
+ type: 'object',
+ required: ['email', 'password'],
+ title: 'UserRegister'
+} as const;
+
+export const UserUpdateSchema = {
+ properties: {
+ email: {
+ anyOf: [
+ {
+ type: 'string',
+ maxLength: 255,
+ format: 'email'
+ },
+ {
+ type: 'null'
+ }
+ ],
+ title: 'Email'
+ },
+ is_active: {
+ type: 'boolean',
+ title: 'Is Active',
+ default: true
+ },
+ is_superuser: {
+ type: 'boolean',
+ title: 'Is Superuser',
+ default: false
+ },
+ full_name: {
+ anyOf: [
+ {
+ type: 'string',
+ maxLength: 255
+ },
+ {
+ type: 'null'
+ }
+ ],
+ title: 'Full Name'
+ },
+ password: {
+ anyOf: [
+ {
+ type: 'string',
+ maxLength: 128,
+ minLength: 8
+ },
+ {
+ type: 'null'
+ }
+ ],
+ title: 'Password'
+ }
+ },
+ type: 'object',
+ title: 'UserUpdate'
+} as const;
+
+export const UserUpdateMeSchema = {
+ properties: {
+ full_name: {
+ anyOf: [
+ {
+ type: 'string',
+ maxLength: 255
+ },
+ {
+ type: 'null'
+ }
+ ],
+ title: 'Full Name'
+ },
+ email: {
+ anyOf: [
+ {
+ type: 'string',
+ maxLength: 255,
+ format: 'email'
+ },
+ {
+ type: 'null'
+ }
+ ],
+ title: 'Email'
+ }
+ },
+ type: 'object',
+ title: 'UserUpdateMe'
+} as const;
+
+export const UsersPublicSchema = {
+ properties: {
+ data: {
+ items: {
+ '$ref': '#/components/schemas/UserPublic'
+ },
+ type: 'array',
+ title: 'Data'
+ },
+ count: {
+ type: 'integer',
+ title: 'Count'
+ }
+ },
+ type: 'object',
+ required: ['data', 'count'],
+ title: 'UsersPublic'
+} as const;
+
+export const ValidationErrorSchema = {
+ properties: {
+ loc: {
+ items: {
+ anyOf: [
+ {
+ type: 'string'
+ },
+ {
+ type: 'integer'
+ }
+ ]
+ },
+ type: 'array',
+ title: 'Location'
+ },
+ msg: {
+ type: 'string',
+ title: 'Message'
+ },
+ type: {
+ type: 'string',
+ title: 'Error Type'
+ }
+ },
+ type: 'object',
+ required: ['loc', 'msg', 'type'],
+ title: 'ValidationError'
+} as const;
\ No newline at end of file
diff --git a/frontend/src/client/sdk.gen.ts b/frontend/src/client/sdk.gen.ts
new file mode 100644
index 0000000000..ba79e3f726
--- /dev/null
+++ b/frontend/src/client/sdk.gen.ts
@@ -0,0 +1,468 @@
+// This file is auto-generated by @hey-api/openapi-ts
+
+import type { CancelablePromise } from './core/CancelablePromise';
+import { OpenAPI } from './core/OpenAPI';
+import { request as __request } from './core/request';
+import type { ItemsReadItemsData, ItemsReadItemsResponse, ItemsCreateItemData, ItemsCreateItemResponse, ItemsReadItemData, ItemsReadItemResponse, ItemsUpdateItemData, ItemsUpdateItemResponse, ItemsDeleteItemData, ItemsDeleteItemResponse, LoginLoginAccessTokenData, LoginLoginAccessTokenResponse, LoginTestTokenResponse, LoginRecoverPasswordData, LoginRecoverPasswordResponse, LoginResetPasswordData, LoginResetPasswordResponse, LoginRecoverPasswordHtmlContentData, LoginRecoverPasswordHtmlContentResponse, PrivateCreateUserData, PrivateCreateUserResponse, UsersReadUsersData, UsersReadUsersResponse, UsersCreateUserData, UsersCreateUserResponse, UsersReadUserMeResponse, UsersDeleteUserMeResponse, UsersUpdateUserMeData, UsersUpdateUserMeResponse, UsersUpdatePasswordMeData, UsersUpdatePasswordMeResponse, UsersRegisterUserData, UsersRegisterUserResponse, UsersReadUserByIdData, UsersReadUserByIdResponse, UsersUpdateUserData, UsersUpdateUserResponse, UsersDeleteUserData, UsersDeleteUserResponse, UtilsTestEmailData, UtilsTestEmailResponse, UtilsHealthCheckResponse } from './types.gen';
+
+export class ItemsService {
+ /**
+ * Read Items
+ * Retrieve items.
+ * @param data The data for the request.
+ * @param data.skip
+ * @param data.limit
+ * @returns ItemsPublic Successful Response
+ * @throws ApiError
+ */
+ public static readItems(data: ItemsReadItemsData = {}): CancelablePromise<ItemsReadItemsResponse> {
+ return __request(OpenAPI, {
+ method: 'GET',
+ url: '/api/v1/items/',
+ query: {
+ skip: data.skip,
+ limit: data.limit
+ },
+ errors: {
+ 422: 'Validation Error'
+ }
+ });
+ }
+
+ /**
+ * Create Item
+ * Create new item.
+ * @param data The data for the request.
+ * @param data.requestBody
+ * @returns ItemPublic Successful Response
+ * @throws ApiError
+ */
+ public static createItem(data: ItemsCreateItemData): CancelablePromise<ItemsCreateItemResponse> {
+ return __request(OpenAPI, {
+ method: 'POST',
+ url: '/api/v1/items/',
+ body: data.requestBody,
+ mediaType: 'application/json',
+ errors: {
+ 422: 'Validation Error'
+ }
+ });
+ }
+
+ /**
+ * Read Item
+ * Get item by ID.
+ * @param data The data for the request.
+ * @param data.id
+ * @returns ItemPublic Successful Response
+ * @throws ApiError
+ */
+ public static readItem(data: ItemsReadItemData): CancelablePromise<ItemsReadItemResponse> {
+ return __request(OpenAPI, {
+ method: 'GET',
+ url: '/api/v1/items/{id}',
+ path: {
+ id: data.id
+ },
+ errors: {
+ 422: 'Validation Error'
+ }
+ });
+ }
+
+ /**
+ * Update Item
+ * Update an item.
+ * @param data The data for the request.
+ * @param data.id
+ * @param data.requestBody
+ * @returns ItemPublic Successful Response
+ * @throws ApiError
+ */
+ public static updateItem(data: ItemsUpdateItemData): CancelablePromise<ItemsUpdateItemResponse> {
+ return __request(OpenAPI, {
+ method: 'PUT',
+ url: '/api/v1/items/{id}',
+ path: {
+ id: data.id
+ },
+ body: data.requestBody,
+ mediaType: 'application/json',
+ errors: {
+ 422: 'Validation Error'
+ }
+ });
+ }
+
+ /**
+ * Delete Item
+ * Delete an item.
+ * @param data The data for the request.
+ * @param data.id
+ * @returns Message Successful Response
+ * @throws ApiError
+ */
+ public static deleteItem(data: ItemsDeleteItemData): CancelablePromise<ItemsDeleteItemResponse> {
+ return __request(OpenAPI, {
+ method: 'DELETE',
+ url: '/api/v1/items/{id}',
+ path: {
+ id: data.id
+ },
+ errors: {
+ 422: 'Validation Error'
+ }
+ });
+ }
+}
+
+export class LoginService {
+ /**
+ * Login Access Token
+ * OAuth2 compatible token login, get an access token for future requests
+ * @param data The data for the request.
+ * @param data.formData
+ * @returns Token Successful Response
+ * @throws ApiError
+ */
+ public static loginAccessToken(data: LoginLoginAccessTokenData): CancelablePromise<LoginLoginAccessTokenResponse> {
+ return __request(OpenAPI, {
+ method: 'POST',
+ url: '/api/v1/login/access-token',
+ formData: data.formData,
+ mediaType: 'application/x-www-form-urlencoded',
+ errors: {
+ 422: 'Validation Error'
+ }
+ });
+ }
+
+ /**
+ * Test Token
+ * Test access token
+ * @returns UserPublic Successful Response
+ * @throws ApiError
+ */
+ public static testToken(): CancelablePromise<LoginTestTokenResponse> {
+ return __request(OpenAPI, {
+ method: 'POST',
+ url: '/api/v1/login/test-token'
+ });
+ }
+
+ /**
+ * Recover Password
+ * Password Recovery
+ * @param data The data for the request.
+ * @param data.email
+ * @returns Message Successful Response
+ * @throws ApiError
+ */
+ public static recoverPassword(data: LoginRecoverPasswordData): CancelablePromise<LoginRecoverPasswordResponse> {
+ return __request(OpenAPI, {
+ method: 'POST',
+ url: '/api/v1/password-recovery/{email}',
+ path: {
+ email: data.email
+ },
+ errors: {
+ 422: 'Validation Error'
+ }
+ });
+ }
+
+ /**
+ * Reset Password
+ * Reset password
+ * @param data The data for the request.
+ * @param data.requestBody
+ * @returns Message Successful Response
+ * @throws ApiError
+ */
+ public static resetPassword(data: LoginResetPasswordData): CancelablePromise<LoginResetPasswordResponse> {
+ return __request(OpenAPI, {
+ method: 'POST',
+ url: '/api/v1/reset-password/',
+ body: data.requestBody,
+ mediaType: 'application/json',
+ errors: {
+ 422: 'Validation Error'
+ }
+ });
+ }
+
+ /**
+ * Recover Password Html Content
+ * HTML Content for Password Recovery
+ * @param data The data for the request.
+ * @param data.email
+ * @returns string Successful Response
+ * @throws ApiError
+ */
+ public static recoverPasswordHtmlContent(data: LoginRecoverPasswordHtmlContentData): CancelablePromise<LoginRecoverPasswordHtmlContentResponse> {
+ return __request(OpenAPI, {
+ method: 'POST',
+ url: '/api/v1/password-recovery-html-content/{email}',
+ path: {
+ email: data.email
+ },
+ errors: {
+ 422: 'Validation Error'
+ }
+ });
+ }
+}
+
+export class PrivateService {
+ /**
+ * Create User
+ * Create a new user.
+ * @param data The data for the request.
+ * @param data.requestBody
+ * @returns UserPublic Successful Response
+ * @throws ApiError
+ */
+ public static createUser(data: PrivateCreateUserData): CancelablePromise<PrivateCreateUserResponse> {
+ return __request(OpenAPI, {
+ method: 'POST',
+ url: '/api/v1/private/users/',
+ body: data.requestBody,
+ mediaType: 'application/json',
+ errors: {
+ 422: 'Validation Error'
+ }
+ });
+ }
+}
+
+export class UsersService {
+ /**
+ * Read Users
+ * Retrieve users.
+ * @param data The data for the request.
+ * @param data.skip
+ * @param data.limit
+ * @returns UsersPublic Successful Response
+ * @throws ApiError
+ */
+ public static readUsers(data: UsersReadUsersData = {}): CancelablePromise<UsersReadUsersResponse> {
+ return __request(OpenAPI, {
+ method: 'GET',
+ url: '/api/v1/users/',
+ query: {
+ skip: data.skip,
+ limit: data.limit
+ },
+ errors: {
+ 422: 'Validation Error'
+ }
+ });
+ }
+
+ /**
+ * Create User
+ * Create new user.
+ * @param data The data for the request.
+ * @param data.requestBody
+ * @returns UserPublic Successful Response
+ * @throws ApiError
+ */
+ public static createUser(data: UsersCreateUserData): CancelablePromise<UsersCreateUserResponse> {
+ return __request(OpenAPI, {
+ method: 'POST',
+ url: '/api/v1/users/',
+ body: data.requestBody,
+ mediaType: 'application/json',
+ errors: {
+ 422: 'Validation Error'
+ }
+ });
+ }
+
+ /**
+ * Read User Me
+ * Get current user.
+ * @returns UserPublic Successful Response
+ * @throws ApiError
+ */
+ public static readUserMe(): CancelablePromise<UsersReadUserMeResponse> {
+ return __request(OpenAPI, {
+ method: 'GET',
+ url: '/api/v1/users/me'
+ });
+ }
+
+ /**
+ * Delete User Me
+ * Delete own user.
+ * @returns Message Successful Response
+ * @throws ApiError
+ */
+ public static deleteUserMe(): CancelablePromise<UsersDeleteUserMeResponse> {
+ return __request(OpenAPI, {
+ method: 'DELETE',
+ url: '/api/v1/users/me'
+ });
+ }
+
+ /**
+ * Update User Me
+ * Update own user.
+ * @param data The data for the request.
+ * @param data.requestBody
+ * @returns UserPublic Successful Response
+ * @throws ApiError
+ */
+ public static updateUserMe(data: UsersUpdateUserMeData): CancelablePromise<UsersUpdateUserMeResponse> {
+ return __request(OpenAPI, {
+ method: 'PATCH',
+ url: '/api/v1/users/me',
+ body: data.requestBody,
+ mediaType: 'application/json',
+ errors: {
+ 422: 'Validation Error'
+ }
+ });
+ }
+
+ /**
+ * Update Password Me
+ * Update own password.
+ * @param data The data for the request.
+ * @param data.requestBody
+ * @returns Message Successful Response
+ * @throws ApiError
+ */
+ public static updatePasswordMe(data: UsersUpdatePasswordMeData): CancelablePromise<UsersUpdatePasswordMeResponse> {
+ return __request(OpenAPI, {
+ method: 'PATCH',
+ url: '/api/v1/users/me/password',
+ body: data.requestBody,
+ mediaType: 'application/json',
+ errors: {
+ 422: 'Validation Error'
+ }
+ });
+ }
+
+ /**
+ * Register User
+ * Create new user without the need to be logged in.
+ * @param data The data for the request.
+ * @param data.requestBody
+ * @returns UserPublic Successful Response
+ * @throws ApiError
+ */
+ public static registerUser(data: UsersRegisterUserData): CancelablePromise<UsersRegisterUserResponse> {
+ return __request(OpenAPI, {
+ method: 'POST',
+ url: '/api/v1/users/signup',
+ body: data.requestBody,
+ mediaType: 'application/json',
+ errors: {
+ 422: 'Validation Error'
+ }
+ });
+ }
+
+ /**
+ * Read User By Id
+ * Get a specific user by id.
+ * @param data The data for the request.
+ * @param data.userId
+ * @returns UserPublic Successful Response
+ * @throws ApiError
+ */
+ public static readUserById(data: UsersReadUserByIdData): CancelablePromise<UsersReadUserByIdResponse> {
+ return __request(OpenAPI, {
+ method: 'GET',
+ url: '/api/v1/users/{user_id}',
+ path: {
+ user_id: data.userId
+ },
+ errors: {
+ 422: 'Validation Error'
+ }
+ });
+ }
+
+ /**
+ * Update User
+ * Update a user.
+ * @param data The data for the request.
+ * @param data.userId
+ * @param data.requestBody
+ * @returns UserPublic Successful Response
+ * @throws ApiError
+ */
+ public static updateUser(data: UsersUpdateUserData): CancelablePromise<UsersUpdateUserResponse> {
+ return __request(OpenAPI, {
+ method: 'PATCH',
+ url: '/api/v1/users/{user_id}',
+ path: {
+ user_id: data.userId
+ },
+ body: data.requestBody,
+ mediaType: 'application/json',
+ errors: {
+ 422: 'Validation Error'
+ }
+ });
+ }
+
+ /**
+ * Delete User
+ * Delete a user.
+ * @param data The data for the request.
+ * @param data.userId
+ * @returns Message Successful Response
+ * @throws ApiError
+ */
+ public static deleteUser(data: UsersDeleteUserData): CancelablePromise<UsersDeleteUserResponse> {
+ return __request(OpenAPI, {
+ method: 'DELETE',
+ url: '/api/v1/users/{user_id}',
+ path: {
+ user_id: data.userId
+ },
+ errors: {
+ 422: 'Validation Error'
+ }
+ });
+ }
+}
+
+export class UtilsService {
+ /**
+ * Test Email
+ * Test emails.
+ * @param data The data for the request.
+ * @param data.emailTo
+ * @returns Message Successful Response
+ * @throws ApiError
+ */
+ public static testEmail(data: UtilsTestEmailData): CancelablePromise<UtilsTestEmailResponse> {
+ return __request(OpenAPI, {
+ method: 'POST',
+ url: '/api/v1/utils/test-email/',
+ query: {
+ email_to: data.emailTo
+ },
+ errors: {
+ 422: 'Validation Error'
+ }
+ });
+ }
+
+ /**
+ * Health Check
+ * @returns boolean Successful Response
+ * @throws ApiError
+ */
+ public static healthCheck(): CancelablePromise<UtilsHealthCheckResponse> {
+ return __request(OpenAPI, {
+ method: 'GET',
+ url: '/api/v1/utils/health-check/'
+ });
+ }
+}
\ No newline at end of file
diff --git a/frontend/src/client/types.gen.ts b/frontend/src/client/types.gen.ts
new file mode 100644
index 0000000000..e62b56cad3
--- /dev/null
+++ b/frontend/src/client/types.gen.ts
@@ -0,0 +1,236 @@
+// This file is auto-generated by @hey-api/openapi-ts
+
+export type Body_login_login_access_token = {
+ grant_type?: (string | null);
+ username: string;
+ password: string;
+ scope?: string;
+ client_id?: (string | null);
+ client_secret?: (string | null);
+};
+
+export type HTTPValidationError = {
+ detail?: Array<ValidationError>;
+};
+
+export type ItemCreate = {
+ title: string;
+ description?: (string | null);
+};
+
+export type ItemPublic = {
+ title: string;
+ description?: (string | null);
+ id: string;
+ owner_id: string;
+ created_at?: (string | null);
+};
+
+export type ItemsPublic = {
+ data: Array<ItemPublic>;
+ count: number;
+};
+
+export type ItemUpdate = {
+ title?: (string | null);
+ description?: (string | null);
+};
+
+export type Message = {
+ message: string;
+};
+
+export type NewPassword = {
+ token: string;
+ new_password: string;
+};
+
+export type PrivateUserCreate = {
+ email: string;
+ password: string;
+ full_name: string;
+ is_verified?: boolean;
+};
+
+export type Token = {
+ access_token: string;
+ token_type?: string;
+};
+
+export type UpdatePassword = {
+ current_password: string;
+ new_password: string;
+};
+
+export type UserCreate = {
+ email: string;
+ is_active?: boolean;
+ is_superuser?: boolean;
+ full_name?: (string | null);
+ password: string;
+};
+
+export type UserPublic = {
+ email: string;
+ is_active?: boolean;
+ is_superuser?: boolean;
+ full_name?: (string | null);
+ id: string;
+ created_at?: (string | null);
+};
+
+export type UserRegister = {
+ email: string;
+ password: string;
+ full_name?: (string | null);
+};
+
+export type UsersPublic = {
+ data: Array<UserPublic>;
+ count: number;
+};
+
+export type UserUpdate = {
+ email?: (string | null);
+ is_active?: boolean;
+ is_superuser?: boolean;
+ full_name?: (string | null);
+ password?: (string | null);
+};
+
+export type UserUpdateMe = {
+ full_name?: (string | null);
+ email?: (string | null);
+};
+
+export type ValidationError = {
+ loc: Array<(string | number)>;
+ msg: string;
+ type: string;
+};
+
+export type ItemsReadItemsData = {
+ limit?: number;
+ skip?: number;
+};
+
+export type ItemsReadItemsResponse = (ItemsPublic);
+
+export type ItemsCreateItemData = {
+ requestBody: ItemCreate;
+};
+
+export type ItemsCreateItemResponse = (ItemPublic);
+
+export type ItemsReadItemData = {
+ id: string;
+};
+
+export type ItemsReadItemResponse = (ItemPublic);
+
+export type ItemsUpdateItemData = {
+ id: string;
+ requestBody: ItemUpdate;
+};
+
+export type ItemsUpdateItemResponse = (ItemPublic);
+
+export type ItemsDeleteItemData = {
+ id: string;
+};
+
+export type ItemsDeleteItemResponse = (Message);
+
+export type LoginLoginAccessTokenData = {
+ formData: Body_login_login_access_token;
+};
+
+export type LoginLoginAccessTokenResponse = (Token);
+
+export type LoginTestTokenResponse = (UserPublic);
+
+export type LoginRecoverPasswordData = {
+ email: string;
+};
+
+export type LoginRecoverPasswordResponse = (Message);
+
+export type LoginResetPasswordData = {
+ requestBody: NewPassword;
+};
+
+export type LoginResetPasswordResponse = (Message);
+
+export type LoginRecoverPasswordHtmlContentData = {
+ email: string;
+};
+
+export type LoginRecoverPasswordHtmlContentResponse = (string);
+
+export type PrivateCreateUserData = {
+ requestBody: PrivateUserCreate;
+};
+
+export type PrivateCreateUserResponse = (UserPublic);
+
+export type UsersReadUsersData = {
+ limit?: number;
+ skip?: number;
+};
+
+export type UsersReadUsersResponse = (UsersPublic);
+
+export type UsersCreateUserData = {
+ requestBody: UserCreate;
+};
+
+export type UsersCreateUserResponse = (UserPublic);
+
+export type UsersReadUserMeResponse = (UserPublic);
+
+export type UsersDeleteUserMeResponse = (Message);
+
+export type UsersUpdateUserMeData = {
+ requestBody: UserUpdateMe;
+};
+
+export type UsersUpdateUserMeResponse = (UserPublic);
+
+export type UsersUpdatePasswordMeData = {
+ requestBody: UpdatePassword;
+};
+
+export type UsersUpdatePasswordMeResponse = (Message);
+
+export type UsersRegisterUserData = {
+ requestBody: UserRegister;
+};
+
+export type UsersRegisterUserResponse = (UserPublic);
+
+export type UsersReadUserByIdData = {
+ userId: string;
+};
+
+export type UsersReadUserByIdResponse = (UserPublic);
+
+export type UsersUpdateUserData = {
+ requestBody: UserUpdate;
+ userId: string;
+};
+
+export type UsersUpdateUserResponse = (UserPublic);
+
+export type UsersDeleteUserData = {
+ userId: string;
+};
+
+export type UsersDeleteUserResponse = (Message);
+
+export type UtilsTestEmailData = {
+ emailTo: string;
+};
+
+export type UtilsTestEmailResponse = (Message);
+
+export type UtilsHealthCheckResponse = (boolean);
\ No newline at end of file
diff --git a/frontend/src/components/Admin/AddUser.tsx b/frontend/src/components/Admin/AddUser.tsx
new file mode 100644
index 0000000000..a0b534bd96
--- /dev/null
+++ b/frontend/src/components/Admin/AddUser.tsx
@@ -0,0 +1,238 @@
+import { zodResolver } from "@hookform/resolvers/zod"
+import { useMutation, useQueryClient } from "@tanstack/react-query"
+import { Plus } from "lucide-react"
+import { useState } from "react"
+import { useForm } from "react-hook-form"
+import { z } from "zod"
+
+import { type UserCreate, UsersService } from "@/client"
+import { Button } from "@/components/ui/button"
+import { Checkbox } from "@/components/ui/checkbox"
+import {
+ Dialog,
+ DialogClose,
+ DialogContent,
+ DialogDescription,
+ DialogFooter,
+ DialogHeader,
+ DialogTitle,
+ DialogTrigger,
+} from "@/components/ui/dialog"
+import {
+ Form,
+ FormControl,
+ FormField,
+ FormItem,
+ FormLabel,
+ FormMessage,
+} from "@/components/ui/form"
+import { Input } from "@/components/ui/input"
+import { LoadingButton } from "@/components/ui/loading-button"
+import useCustomToast from "@/hooks/useCustomToast"
+import { handleError } from "@/utils"
+
+const formSchema = z
+ .object({
+ email: z.email({ message: "Invalid email address" }),
+ full_name: z.string().optional(),
+ password: z
+ .string()
+ .min(1, { message: "Password is required" })
+ .min(8, { message: "Password must be at least 8 characters" }),
+ confirm_password: z
+ .string()
+ .min(1, { message: "Please confirm your password" }),
+ is_superuser: z.boolean(),
+ is_active: z.boolean(),
+ })
+ .refine((data) => data.password === data.confirm_password, {
+ message: "The passwords don't match",
+ path: ["confirm_password"],
+ })
+
+type FormData = z.infer<typeof formSchema>
+
+const AddUser = () => {
+ const [isOpen, setIsOpen] = useState(false)
+ const queryClient = useQueryClient()
+ const { showSuccessToast, showErrorToast } = useCustomToast()
+
+ const form = useForm({
+ resolver: zodResolver(formSchema),
+ mode: "onBlur",
+ criteriaMode: "all",
+ defaultValues: {
+ email: "",
+ full_name: "",
+ password: "",
+ confirm_password: "",
+ is_superuser: false,
+ is_active: false,
+ },
+ })
+
+ const mutation = useMutation({
+ mutationFn: (data: UserCreate) =>
+ UsersService.createUser({ requestBody: data }),
+ onSuccess: () => {
+ showSuccessToast("User created successfully")
+ form.reset()
+ setIsOpen(false)
+ },
+ onError: handleError.bind(showErrorToast),
+ onSettled: () => {
+ queryClient.invalidateQueries({ queryKey: ["users"] })
+ },
+ })
+
+ const onSubmit = (data: FormData) => {
+ mutation.mutate(data)
+ }
+
+ return (
+
+ )
+}
+
+export default AddUser
diff --git a/frontend/src/components/Admin/DeleteUser.tsx b/frontend/src/components/Admin/DeleteUser.tsx
new file mode 100644
index 0000000000..4ffd023e77
--- /dev/null
+++ b/frontend/src/components/Admin/DeleteUser.tsx
@@ -0,0 +1,95 @@
+import { useMutation, useQueryClient } from "@tanstack/react-query"
+import { Trash2 } from "lucide-react"
+import { useState } from "react"
+import { useForm } from "react-hook-form"
+
+import { UsersService } from "@/client"
+import { Button } from "@/components/ui/button"
+import {
+ Dialog,
+ DialogClose,
+ DialogContent,
+ DialogDescription,
+ DialogFooter,
+ DialogHeader,
+ DialogTitle,
+} from "@/components/ui/dialog"
+import { DropdownMenuItem } from "@/components/ui/dropdown-menu"
+import { LoadingButton } from "@/components/ui/loading-button"
+import useCustomToast from "@/hooks/useCustomToast"
+import { handleError } from "@/utils"
+
+interface DeleteUserProps {
+ id: string
+ onSuccess: () => void
+}
+
+const DeleteUser = ({ id, onSuccess }: DeleteUserProps) => {
+ const [isOpen, setIsOpen] = useState(false)
+ const queryClient = useQueryClient()
+ const { showSuccessToast, showErrorToast } = useCustomToast()
+ const { handleSubmit } = useForm()
+
+ const deleteUser = async (id: string) => {
+ await UsersService.deleteUser({ userId: id })
+ }
+
+ const mutation = useMutation({
+ mutationFn: deleteUser,
+ onSuccess: () => {
+ showSuccessToast("The user was deleted successfully")
+ setIsOpen(false)
+ onSuccess()
+ },
+ onError: handleError.bind(showErrorToast),
+ onSettled: () => {
+ queryClient.invalidateQueries()
+ },
+ })
+
+ const onSubmit = async () => {
+ mutation.mutate(id)
+ }
+
+ return (
+
+ )
+}
+
+export default DeleteUser
diff --git a/frontend/src/components/Admin/EditUser.tsx b/frontend/src/components/Admin/EditUser.tsx
new file mode 100644
index 0000000000..172904f695
--- /dev/null
+++ b/frontend/src/components/Admin/EditUser.tsx
@@ -0,0 +1,239 @@
+import { zodResolver } from "@hookform/resolvers/zod"
+import { useMutation, useQueryClient } from "@tanstack/react-query"
+import { Pencil } from "lucide-react"
+import { useState } from "react"
+import { useForm } from "react-hook-form"
+import { z } from "zod"
+
+import { type UserPublic, UsersService } from "@/client"
+import { Button } from "@/components/ui/button"
+import { Checkbox } from "@/components/ui/checkbox"
+import {
+ Dialog,
+ DialogClose,
+ DialogContent,
+ DialogDescription,
+ DialogFooter,
+ DialogHeader,
+ DialogTitle,
+} from "@/components/ui/dialog"
+import { DropdownMenuItem } from "@/components/ui/dropdown-menu"
+import {
+ Form,
+ FormControl,
+ FormField,
+ FormItem,
+ FormLabel,
+ FormMessage,
+} from "@/components/ui/form"
+import { Input } from "@/components/ui/input"
+import { LoadingButton } from "@/components/ui/loading-button"
+import useCustomToast from "@/hooks/useCustomToast"
+import { handleError } from "@/utils"
+
+const formSchema = z
+ .object({
+ email: z.email({ message: "Invalid email address" }),
+ full_name: z.string().optional(),
+ password: z
+ .string()
+ .min(8, { message: "Password must be at least 8 characters" })
+ .optional()
+ .or(z.literal("")),
+ confirm_password: z.string().optional(),
+ is_superuser: z.boolean().optional(),
+ is_active: z.boolean().optional(),
+ })
+ .refine((data) => !data.password || data.password === data.confirm_password, {
+ message: "The passwords don't match",
+ path: ["confirm_password"],
+ })
+
+type FormData = z.infer<typeof formSchema>
+
+interface EditUserProps {
+ user: UserPublic
+ onSuccess: () => void
+}
+
+const EditUser = ({ user, onSuccess }: EditUserProps) => {
+ const [isOpen, setIsOpen] = useState(false)
+ const queryClient = useQueryClient()
+ const { showSuccessToast, showErrorToast } = useCustomToast()
+
+ const form = useForm({
+ resolver: zodResolver(formSchema),
+ mode: "onBlur",
+ criteriaMode: "all",
+ defaultValues: {
+ email: user.email,
+ full_name: user.full_name ?? undefined,
+ is_superuser: user.is_superuser,
+ is_active: user.is_active,
+ },
+ })
+
+ const mutation = useMutation({
+ mutationFn: (data: FormData) =>
+ UsersService.updateUser({ userId: user.id, requestBody: data }),
+ onSuccess: () => {
+ showSuccessToast("User updated successfully")
+ setIsOpen(false)
+ onSuccess()
+ },
+ onError: handleError.bind(showErrorToast),
+ onSettled: () => {
+ queryClient.invalidateQueries({ queryKey: ["users"] })
+ },
+ })
+
+ const onSubmit = (data: FormData) => {
+ // exclude confirm_password from submission data and remove password if empty
+ const { confirm_password: _, ...submitData } = data
+ if (!submitData.password) {
+ delete submitData.password
+ }
+ mutation.mutate(submitData)
+ }
+
+ return (
+
+ )
+}
+
+export default EditUser
diff --git a/frontend/src/components/Admin/UserActionsMenu.tsx b/frontend/src/components/Admin/UserActionsMenu.tsx
new file mode 100644
index 0000000000..01f71cbb7a
--- /dev/null
+++ b/frontend/src/components/Admin/UserActionsMenu.tsx
@@ -0,0 +1,40 @@
+import { EllipsisVertical } from "lucide-react"
+import { useState } from "react"
+
+import type { UserPublic } from "@/client"
+import { Button } from "@/components/ui/button"
+import {
+ DropdownMenu,
+ DropdownMenuContent,
+ DropdownMenuTrigger,
+} from "@/components/ui/dropdown-menu"
+import useAuth from "@/hooks/useAuth"
+import DeleteUser from "./DeleteUser"
+import EditUser from "./EditUser"
+
+interface UserActionsMenuProps {
+ user: UserPublic
+}
+
+export const UserActionsMenu = ({ user }: UserActionsMenuProps) => {
+ const [open, setOpen] = useState(false)
+ const { user: currentUser } = useAuth()
+
+ if (user.id === currentUser?.id) {
+ return null
+ }
+
+ return (
+
+
+
+
+
+ setOpen(false)} />
+ setOpen(false)} />
+
+
+ )
+}
diff --git a/frontend/src/components/Admin/columns.tsx b/frontend/src/components/Admin/columns.tsx
new file mode 100644
index 0000000000..8b0fa13eef
--- /dev/null
+++ b/frontend/src/components/Admin/columns.tsx
@@ -0,0 +1,76 @@
+import type { ColumnDef } from "@tanstack/react-table"
+
+import type { UserPublic } from "@/client"
+import { Badge } from "@/components/ui/badge"
+import { cn } from "@/lib/utils"
+import { UserActionsMenu } from "./UserActionsMenu"
+
+export type UserTableData = UserPublic & {
+ isCurrentUser: boolean
+}
+
+export const columns: ColumnDef<UserTableData>[] = [
+ {
+ accessorKey: "full_name",
+ header: "Full Name",
+ cell: ({ row }) => {
+ const fullName = row.original.full_name
+ return (
+